diff --git a/.flake8 b/.flake8
index 89954f8bd..90316de21 100644
--- a/.flake8
+++ b/.flake8
@@ -1,31 +1,29 @@
# -*- coding: utf-8 -*-
-#
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# https://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
+#
[flake8]
+# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+# Resolve flake8 lint issues
ignore = E203, E231, E266, E501, W503
exclude =
- # Exclude environment test code.
- tests/environment/**
-
- # Exclude generated code.
- **/proto/**
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333):
+ # Ensure that generated code passes flake8 lint
**/gapic/**
**/services/**
**/types/**
+ # Exclude Protobuf gencode
*_pb2.py
# Standard linting exemptions.
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index a3da1b0d4..6f1eaeb91 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7
-# created: 2023-08-02T10:53:29.114535628Z
+ digest: sha256:ecf409a43d8b157fb83c403de4d83e3da7d88e423044410c0e2434bf776221d1
+# created: 2025-04-10T16:21:41.67162455Z
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 2a3b42055..0738e11ee 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,8 +5,8 @@
# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json.
-# @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo
-* @googleapis/yoshi-python @googleapis/api-logging
+# @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners are the default owners for changes in this repo
+* @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners
-# @googleapis/python-samples-reviewers @googleapis/api-logging are the default owners for samples changes
-/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging
+# @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners are the default owners for samples changes
+/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
index a9d3f44e3..d5f69b10a 100644
--- a/.github/blunderbuss.yml
+++ b/.github/blunderbuss.yml
@@ -1,4 +1,20 @@
+# Blunderbuss config
+#
+# This file controls who is assigned for pull requests and issues.
+# Note: This file is autogenerated. To make changes to the assignee
+# team, please update `codeowner_team` in `.repo-metadata.json`.
assign_issues:
- - googleapis/api-logging-reviewers
+ - googleapis/api-logging
+ - googleapis/api-logging-partners
+
+assign_issues_by:
+ - labels:
+ - "samples"
+ to:
+ - googleapis/python-samples-reviewers
+ - googleapis/api-logging
+ - googleapis/api-logging-partners
+
assign_prs:
- - googleapis/api-logging-reviewers
+ - googleapis/api-logging
+ - googleapis/api-logging-partners
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
index d4ca94189..d47d146a9 100644
--- a/.github/release-trigger.yml
+++ b/.github/release-trigger.yml
@@ -1 +1,2 @@
enabled: true
+multiScmName: python-logging
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 37438d33d..439a0bcb7 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -12,3 +12,7 @@ branchProtectionRules:
- 'Samples - Lint'
- 'Samples - Python 3.7'
- 'Samples - Python 3.8'
+ - 'Samples - Python 3.9'
+ - 'Samples - Python 3.10'
+ - 'Samples - Python 3.11'
+ - 'Samples - Python 3.12'
diff --git a/.gitignore b/.gitignore
index b4243ced7..d083ea1dd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,6 +50,7 @@ docs.metadata
# Virtual environment
env/
+venv/
# Test logs
coverage.xml
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index afa7a81aa..d41b45aa1 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,11 +15,13 @@
set -eo pipefail
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
if [[ -z "${PROJECT_ROOT:-}" ]]; then
- PROJECT_ROOT="github/python-logging"
+ PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
fi
-cd "${PROJECT_ROOT}"
+pushd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -28,17 +30,16 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
# Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
-
-# Remove old nox
-python3 -m pip uninstall --yes --quiet nox-automation
-
-# Install nox
-python3 -m pip install --upgrade --quiet nox
-python3 -m nox --version
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+ export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
@@ -53,7 +54,7 @@ fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3 -m nox -s ${NOX_SESSION:-}
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
deleted file mode 100644
index 8e39a2cc4..000000000
--- a/.kokoro/docker/docs/Dockerfile
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ubuntu:22.04
-
-ENV DEBIAN_FRONTEND noninteractive
-
-# Ensure local Python is preferred over distribution Python.
-ENV PATH /usr/local/bin:$PATH
-
-# Install dependencies.
-RUN apt-get update \
- && apt-get install -y --no-install-recommends \
- apt-transport-https \
- build-essential \
- ca-certificates \
- curl \
- dirmngr \
- git \
- gpg-agent \
- graphviz \
- libbz2-dev \
- libdb5.3-dev \
- libexpat1-dev \
- libffi-dev \
- liblzma-dev \
- libreadline-dev \
- libsnappy-dev \
- libssl-dev \
- libsqlite3-dev \
- portaudio19-dev \
- python3-distutils \
- redis-server \
- software-properties-common \
- ssh \
- sudo \
- tcl \
- tcl-dev \
- tk \
- tk-dev \
- uuid-dev \
- wget \
- zlib1g-dev \
- && add-apt-repository universe \
- && apt-get update \
- && apt-get -y install jq \
- && apt-get clean autoclean \
- && apt-get autoremove -y \
- && rm -rf /var/lib/apt/lists/* \
- && rm -f /var/cache/apt/archives/*.deb
-
-###################### Install python 3.9.13
-
-# Download python 3.9.13
-RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz
-
-# Extract files
-RUN tar -xvf Python-3.9.13.tgz
-
-# Install python 3.9.13
-RUN ./Python-3.9.13/configure --enable-optimizations
-RUN make altinstall
-
-###################### Install pip
-RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
- && python3 /tmp/get-pip.py \
- && rm /tmp/get-pip.py
-
-# Test pip
-RUN python3 -m pip
-
-CMD ["python3.8"]
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
deleted file mode 100644
index 36e4a6540..000000000
--- a/.kokoro/docs/common.cfg
+++ /dev/null
@@ -1,85 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-logging/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-logging/.kokoro/publish-docs.sh"
-}
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "docs-staging"
-}
-
-env_vars: {
- key: "V2_STAGING_BUCKET"
- # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2`
- value: "docs-staging-v2"
-}
-
-# It will upload the docker image after successful builds.
-env_vars: {
- key: "TRAMPOLINE_IMAGE_UPLOAD"
- value: "true"
-}
-
-# It will always build the docker image.
-env_vars: {
- key: "TRAMPOLINE_DOCKERFILE"
- value: ".kokoro/docker/docs/Dockerfile"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "docuploader_service_account"
- }
- }
-}
-
-#############################################
-# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
-
-env_vars: {
- key: "PRODUCT_AREA_LABEL"
- value: "observability"
-}
-env_vars: {
- key: "PRODUCT_LABEL"
- value: "logging"
-}
-env_vars: {
- key: "LANGUAGE_LABEL"
- value: "python"
-}
-
-###################################################
-
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
deleted file mode 100644
index 3d5288bef..000000000
--- a/.kokoro/docs/docs-presubmit.cfg
+++ /dev/null
@@ -1,28 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "gcloud-python-test"
-}
-
-env_vars: {
- key: "V2_STAGING_BUCKET"
- value: "gcloud-python-test"
-}
-
-# We only upload the image in the main `docs` build.
-env_vars: {
- key: "TRAMPOLINE_IMAGE_UPLOAD"
- value: "false"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-logging/.kokoro/build.sh"
-}
-
-# Only run this nox session.
-env_vars: {
- key: "NOX_SESSION"
- value: "docs docfx"
-}
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
deleted file mode 100644
index 8f43917d9..000000000
--- a/.kokoro/docs/docs.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
index 6f3972140..c435402f4 100755
--- a/.kokoro/populate-secrets.sh
+++ b/.kokoro/populate-secrets.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2023 Google LLC.
+# Copyright 2024 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
deleted file mode 100755
index 9eafe0be3..000000000
--- a/.kokoro/publish-docs.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-export PATH="${HOME}/.local/bin:${PATH}"
-
-# Install nox
-python3 -m pip install --require-hashes -r .kokoro/requirements.txt
-python3 -m nox --version
-
-# build docs
-nox -s docs
-
-# create metadata
-python3 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
-
-
-# docfx yaml files
-nox -s docfx
-
-# create metadata.
-python3 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
deleted file mode 100755
index 9bdfbceb5..000000000
--- a/.kokoro/release.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Start the releasetool reporter
-python3 -m pip install --require-hashes -r github/python-logging/.kokoro/requirements.txt
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
-cd github/python-logging
-python3 setup.py sdist bdist_wheel
-twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index 4dc3167a5..000000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,69 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-logging/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-logging/.kokoro/release.sh"
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google-cloud-pypi-token-keystore-1"
- }
- }
-}
-
-# Tokens needed to report release status back to GitHub
-env_vars: {
- key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
-
-# Store the packages we uploaded to PyPI. That way, we have a record of exactly
-# what we published, which we can use to generate SBOMs and attestations.
-action {
- define_artifacts {
- regex: "github/python-logging/**/*.tar.gz"
- strip_prefix: "github/python-logging"
- }
-}
-
-
-#############################################
-# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
-
-env_vars: {
- key: "PRODUCT_AREA_LABEL"
- value: "observability"
-}
-env_vars: {
- key: "PRODUCT_LABEL"
- value: "logging"
-}
-env_vars: {
- key: "LANGUAGE_LABEL"
- value: "python"
-}
-
-###################################################
-
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
deleted file mode 100644
index 8f43917d9..000000000
--- a/.kokoro/release/release.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
deleted file mode 100644
index ec867d9fd..000000000
--- a/.kokoro/requirements.in
+++ /dev/null
@@ -1,10 +0,0 @@
-gcp-docuploader
-gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x
-importlib-metadata
-typing-extensions
-twine
-wheel
-setuptools
-nox>=2022.11.21 # required to remove dependency on py
-charset-normalizer<3
-click<8.1.0
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
deleted file mode 100644
index 029bd342d..000000000
--- a/.kokoro/requirements.txt
+++ /dev/null
@@ -1,496 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.9
-# by the following command:
-#
-# pip-compile --allow-unsafe --generate-hashes requirements.in
-#
-argcomplete==2.0.0 \
- --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \
- --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e
- # via nox
-attrs==22.1.0 \
- --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \
- --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c
- # via gcp-releasetool
-bleach==5.0.1 \
- --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \
- --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c
- # via readme-renderer
-cachetools==5.2.0 \
- --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \
- --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db
- # via google-auth
-certifi==2023.7.22 \
- --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \
- --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9
- # via requests
-cffi==1.15.1 \
- --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \
- --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \
- --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \
- --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \
- --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \
- --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \
- --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \
- --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \
- --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \
- --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \
- --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \
- --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \
- --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \
- --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \
- --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \
- --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \
- --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \
- --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \
- --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \
- --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \
- --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \
- --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \
- --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \
- --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \
- --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \
- --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \
- --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \
- --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \
- --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \
- --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \
- --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \
- --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \
- --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \
- --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \
- --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \
- --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \
- --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \
- --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \
- --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \
- --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \
- --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \
- --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \
- --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \
- --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \
- --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \
- --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \
- --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \
- --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \
- --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \
- --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \
- --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \
- --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \
- --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \
- --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \
- --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \
- --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \
- --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \
- --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \
- --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \
- --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \
- --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \
- --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \
- --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \
- --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0
- # via cryptography
-charset-normalizer==2.1.1 \
- --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
- --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
- # via
- # -r requirements.in
- # requests
-click==8.0.4 \
- --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \
- --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb
- # via
- # -r requirements.in
- # gcp-docuploader
- # gcp-releasetool
-colorlog==6.7.0 \
- --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \
- --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5
- # via
- # gcp-docuploader
- # nox
-commonmark==0.9.1 \
- --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \
- --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9
- # via rich
-cryptography==41.0.3 \
- --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \
- --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \
- --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \
- --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \
- --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \
- --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \
- --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \
- --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \
- --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \
- --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \
- --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \
- --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \
- --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \
- --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \
- --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \
- --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \
- --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \
- --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \
- --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \
- --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \
- --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \
- --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \
- --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de
- # via
- # gcp-releasetool
- # secretstorage
-distlib==0.3.6 \
- --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \
- --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e
- # via virtualenv
-docutils==0.19 \
- --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \
- --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc
- # via readme-renderer
-filelock==3.8.0 \
- --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \
- --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4
- # via virtualenv
-gcp-docuploader==0.6.4 \
- --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \
- --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf
- # via -r requirements.in
-gcp-releasetool==1.10.5 \
- --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \
- --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9
- # via -r requirements.in
-google-api-core==2.10.2 \
- --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \
- --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e
- # via
- # google-cloud-core
- # google-cloud-storage
-google-auth==2.14.1 \
- --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \
- --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016
- # via
- # gcp-releasetool
- # google-api-core
- # google-cloud-core
- # google-cloud-storage
-google-cloud-core==2.3.2 \
- --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \
- --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a
- # via google-cloud-storage
-google-cloud-storage==2.6.0 \
- --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \
- --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9
- # via gcp-docuploader
-google-crc32c==1.5.0 \
- --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
- --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \
- --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \
- --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \
- --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \
- --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \
- --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \
- --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \
- --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \
- --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \
- --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \
- --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \
- --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \
- --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \
- --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \
- --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \
- --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \
- --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \
- --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \
- --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \
- --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \
- --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \
- --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \
- --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \
- --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \
- --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \
- --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \
- --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \
- --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \
- --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \
- --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \
- --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \
- --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \
- --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \
- --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \
- --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \
- --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \
- --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \
- --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \
- --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \
- --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \
- --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \
- --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \
- --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \
- --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \
- --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \
- --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \
- --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \
- --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \
- --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \
- --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \
- --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \
- --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \
- --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \
- --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \
- --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \
- --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \
- --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \
- --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \
- --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \
- --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \
- --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \
- --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \
- --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \
- --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \
- --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
- --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
- --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
- # via google-resumable-media
-google-resumable-media==2.4.0 \
- --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \
- --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f
- # via google-cloud-storage
-googleapis-common-protos==1.57.0 \
- --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \
- --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c
- # via google-api-core
-idna==3.4 \
- --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
- --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
- # via requests
-importlib-metadata==5.0.0 \
- --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \
- --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43
- # via
- # -r requirements.in
- # keyring
- # twine
-jaraco-classes==3.2.3 \
- --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \
- --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a
- # via keyring
-jeepney==0.8.0 \
- --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
- --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
- # via
- # keyring
- # secretstorage
-jinja2==3.1.2 \
- --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \
- --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61
- # via gcp-releasetool
-keyring==23.11.0 \
- --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \
- --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361
- # via
- # gcp-releasetool
- # twine
-markupsafe==2.1.1 \
- --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \
- --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \
- --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \
- --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \
- --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \
- --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \
- --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \
- --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \
- --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \
- --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \
- --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \
- --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \
- --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \
- --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \
- --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \
- --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \
- --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \
- --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \
- --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \
- --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \
- --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \
- --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \
- --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \
- --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \
- --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \
- --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \
- --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \
- --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \
- --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \
- --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \
- --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \
- --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \
- --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \
- --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \
- --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \
- --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \
- --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \
- --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \
- --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \
- --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7
- # via jinja2
-more-itertools==9.0.0 \
- --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \
- --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab
- # via jaraco-classes
-nox==2022.11.21 \
- --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \
- --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684
- # via -r requirements.in
-packaging==21.3 \
- --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \
- --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522
- # via
- # gcp-releasetool
- # nox
-pkginfo==1.8.3 \
- --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \
- --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c
- # via twine
-platformdirs==2.5.4 \
- --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \
- --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10
- # via virtualenv
-protobuf==3.20.3 \
- --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \
- --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \
- --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \
- --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \
- --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \
- --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \
- --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \
- --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \
- --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \
- --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \
- --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \
- --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \
- --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \
- --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \
- --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \
- --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \
- --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \
- --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \
- --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \
- --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \
- --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \
- --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee
- # via
- # gcp-docuploader
- # gcp-releasetool
- # google-api-core
-pyasn1==0.4.8 \
- --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \
- --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba
- # via
- # pyasn1-modules
- # rsa
-pyasn1-modules==0.2.8 \
- --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \
- --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74
- # via google-auth
-pycparser==2.21 \
- --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
- --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
- # via cffi
-pygments==2.15.0 \
- --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \
- --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500
- # via
- # readme-renderer
- # rich
-pyjwt==2.6.0 \
- --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \
- --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14
- # via gcp-releasetool
-pyparsing==3.0.9 \
- --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \
- --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc
- # via packaging
-pyperclip==1.8.2 \
- --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57
- # via gcp-releasetool
-python-dateutil==2.8.2 \
- --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
- --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
- # via gcp-releasetool
-readme-renderer==37.3 \
- --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \
- --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343
- # via twine
-requests==2.31.0 \
- --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
- --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
- # via
- # gcp-releasetool
- # google-api-core
- # google-cloud-storage
- # requests-toolbelt
- # twine
-requests-toolbelt==0.10.1 \
- --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \
- --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d
- # via twine
-rfc3986==2.0.0 \
- --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
- --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
- # via twine
-rich==12.6.0 \
- --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \
- --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0
- # via twine
-rsa==4.9 \
- --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
- --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
- # via google-auth
-secretstorage==3.3.3 \
- --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
- --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
- # via keyring
-six==1.16.0 \
- --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
- --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
- # via
- # bleach
- # gcp-docuploader
- # google-auth
- # python-dateutil
-twine==4.0.1 \
- --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \
- --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0
- # via -r requirements.in
-typing-extensions==4.4.0 \
- --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \
- --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e
- # via -r requirements.in
-urllib3==1.26.12 \
- --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \
- --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997
- # via
- # requests
- # twine
-virtualenv==20.16.7 \
- --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \
- --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29
- # via nox
-webencodings==0.5.1 \
- --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \
- --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923
- # via bleach
-wheel==0.38.4 \
- --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \
- --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8
- # via -r requirements.in
-zipp==3.10.0 \
- --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \
- --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==65.5.1 \
- --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \
- --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f
- # via -r requirements.in
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
new file mode 100644
index 000000000..fb8ce8795
--- /dev/null
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -0,0 +1,59 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.12"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-312"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+#############################################
+# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
+
+env_vars: {
+ key: "PRODUCT_AREA_LABEL"
+ value: "observability"
+}
+env_vars: {
+ key: "PRODUCT_LABEL"
+ value: "logging"
+}
+env_vars: {
+ key: "LANGUAGE_LABEL"
+ value: "python"
+}
+
+###################################################
+
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.12/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
new file mode 100644
index 000000000..7e2973e3b
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
new file mode 100644
index 000000000..71cd1e597
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.12/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
new file mode 100644
index 000000000..4eb8ee8be
--- /dev/null
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -0,0 +1,60 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.13"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-313"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-logging/.kokoro/trampoline_v2.sh"
+
+
+#############################################
+# this section merged from .kokoro/common_env_vars.cfg using owlbot.py
+
+env_vars: {
+ key: "PRODUCT_AREA_LABEL"
+ value: "observability"
+}
+env_vars: {
+ key: "PRODUCT_LABEL"
+ value: "logging"
+}
+env_vars: {
+ key: "LANGUAGE_LABEL"
+ value: "python"
+}
+
+###################################################
+
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.13/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
new file mode 100644
index 000000000..7e2973e3b
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-logging/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
new file mode 100644
index 000000000..71cd1e597
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.13/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index 63ac41dfa..e9d8bd79a 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 5a0f5fab6..53e365bc4 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Install nox
-python3.9 -m pip install --upgrade --quiet nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
# Use secrets acessor service account to get secrets
if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 50b35a48c..7933d8201 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index d85b1f267..48f796997 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
index 59a7cf3a9..35fa52923 100755
--- a/.kokoro/trampoline_v2.sh
+++ b/.kokoro/trampoline_v2.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 19409cbd3..1d74695f7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -22,7 +22,7 @@ repos:
- id: end-of-file-fixer
- id: check-yaml
- repo: https://github.com/psf/black
- rev: 22.3.0
+ rev: 23.7.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 23efc1eaa..d235af2ce 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "3.6.0"
+ ".": "3.12.1"
}
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 0b6c0d8ca..83c212332 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -10,7 +10,7 @@
"repo": "googleapis/python-logging",
"distribution_name": "google-cloud-logging",
"api_id": "logging.googleapis.com",
- "codeowner_team": "@googleapis/api-logging",
+ "codeowner_team": "@googleapis/api-logging @googleapis/api-logging-partners",
"default_version": "v2",
"api_shortname": "logging",
"api_description": "allows you to store, search, analyze, monitor, and alert on log data and events from Google Cloud and Amazon Web Services. Using the BindPlane service, you can also collect this data from over 150 common application components, on-premises systems, and hybrid cloud systems. BindPlane is included with your Google Cloud project at no additional cost."
diff --git a/.trampolinerc b/.trampolinerc
index 65248f703..636e35c32 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 16e128b18..1f98b01a8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,163 @@
[1]: https://pypi.org/project/google-cloud-logging/#history
+## [3.12.1](https://github.com/googleapis/python-logging/compare/v3.12.0...v3.12.1) (2025-04-21)
+
+
+### Bug Fixes
+
+* Make logging handler close conditional to having the transport opened ([#990](https://github.com/googleapis/python-logging/issues/990)) ([66c6b91](https://github.com/googleapis/python-logging/commit/66c6b91725eb479a0af138a2be13f3c25f369d7e))
+
+## [3.12.0](https://github.com/googleapis/python-logging/compare/v3.11.4...v3.12.0) (2025-04-10)
+
+
+### Features
+
+* Add REST Interceptors which support reading metadata ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77))
+* Add support for opt-in debug logging ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77))
+* Added flushes/close functionality to logging handlers ([#917](https://github.com/googleapis/python-logging/issues/917)) ([d179304](https://github.com/googleapis/python-logging/commit/d179304b344277e349456f72cd90c56f28011286))
+
+
+### Bug Fixes
+
+* Allow protobuf 6.x ([#977](https://github.com/googleapis/python-logging/issues/977)) ([6757890](https://github.com/googleapis/python-logging/commit/675789001344fdae68ee20ec14e14c11c83a0433))
+* **deps:** Require google-cloud-audit-log >= 0.3.1 ([#979](https://github.com/googleapis/python-logging/issues/979)) ([1cc00ec](https://github.com/googleapis/python-logging/commit/1cc00ecf646a7a36eb32afd2e5df3d9aa7f564b1))
+* Fix typing issue with gRPC metadata when key ends in -bin ([681bcc5](https://github.com/googleapis/python-logging/commit/681bcc5c1f983bb5a43e1d5ebcdb14e5e3f25a77))
+
+
+### Documentation
+
+* Added documentation on log_level and excluded_loggers params in setup_logging ([#971](https://github.com/googleapis/python-logging/issues/971)) ([70d9d25](https://github.com/googleapis/python-logging/commit/70d9d25bf8c3c85a3c5523ecc7fbdbf72f08c583))
+* Update README to break infinite redirect loop ([#972](https://github.com/googleapis/python-logging/issues/972)) ([52cd907](https://github.com/googleapis/python-logging/commit/52cd907bb313df2766ec11e3d24c7e10cda31ca7))
+
+## [3.11.4](https://github.com/googleapis/python-logging/compare/v3.11.3...v3.11.4) (2025-01-22)
+
+
+### Bug Fixes
+
+* Made `write_entries` raise `ValueError` on `ParseError`s ([#958](https://github.com/googleapis/python-logging/issues/958)) ([5309478](https://github.com/googleapis/python-logging/commit/5309478c054d0f2b9301817fd835f2098f51dc3a))
+* Require proto-plus >= 1.25 for Python 3.13 ([#955](https://github.com/googleapis/python-logging/issues/955)) ([7baed8e](https://github.com/googleapis/python-logging/commit/7baed8e968f0bfa6abdbf0715dc43822f2fba8ba))
+* Require proto-plus >= 1.25 for Python 3.13 ([#955](https://github.com/googleapis/python-logging/issues/955)) ([002b1fc](https://github.com/googleapis/python-logging/commit/002b1fcb395d77d94d7216560c30015b9aefca81))
+
+## [3.11.3](https://github.com/googleapis/python-logging/compare/v3.11.2...v3.11.3) (2024-10-15)
+
+
+### Bug Fixes
+
+* 16-bit hexadecimal formatting for XCTC span IDs ([#946](https://github.com/googleapis/python-logging/issues/946)) ([1f2b190](https://github.com/googleapis/python-logging/commit/1f2b190c0d1a7125d9412c157915d0011cdd4c47))
+
+## [3.11.2](https://github.com/googleapis/python-logging/compare/v3.11.1...v3.11.2) (2024-08-15)
+
+
+### Bug Fixes
+
+* **deps:** Require google-cloud-appengine-logging>=0.1.3 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374))
+* **deps:** Require google-cloud-audit-log >= 0.2.4 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374))
+* **deps:** Require opentelemetry-api>=1.9.0 ([550abca](https://github.com/googleapis/python-logging/commit/550abca2846218d114a6b4b42cb165489e630374))
+* Fixed type hinting issue with specifying Transport class ([#930](https://github.com/googleapis/python-logging/issues/930)) ([e2875d6](https://github.com/googleapis/python-logging/commit/e2875d664c153a4328bd42790dfb7b4ac36a9048))
+
+## [3.11.1](https://github.com/googleapis/python-logging/compare/v3.11.0...v3.11.1) (2024-08-06)
+
+
+### Bug Fixes
+
+* Allow protobuf 5.x ([#888](https://github.com/googleapis/python-logging/issues/888)) ([7746e64](https://github.com/googleapis/python-logging/commit/7746e643af29b1008d6e6d6a9958c8337c958dd4))
+
+## [3.11.0](https://github.com/googleapis/python-logging/compare/v3.10.0...v3.11.0) (2024-07-15)
+
+
+### Features
+
+* OpenTelemetry trace/spanID integration for Python handlers ([#889](https://github.com/googleapis/python-logging/issues/889)) ([78168a3](https://github.com/googleapis/python-logging/commit/78168a38577b698130a861af4e4d229f42660330))
+
+
+### Bug Fixes
+
+* Added environment specific labels to client library when running in Cloud Run Jobs ([#877](https://github.com/googleapis/python-logging/issues/877)) ([9c5e8f0](https://github.com/googleapis/python-logging/commit/9c5e8f0548f88235fe6474469bc37685e2498dd1))
+* Added missing import into logger.py ([#896](https://github.com/googleapis/python-logging/issues/896)) ([9ca242d](https://github.com/googleapis/python-logging/commit/9ca242d10f9f3bca120b292f478d62f5fa1d3c06))
+* Added type hints to CloudLoggingHandler constructor ([#903](https://github.com/googleapis/python-logging/issues/903)) ([6959345](https://github.com/googleapis/python-logging/commit/69593459614be968f7a0136aa76701c4fc408834))
+
+
+### Documentation
+
+* Add summary_overview template ([#878](https://github.com/googleapis/python-logging/issues/878)) ([b60714c](https://github.com/googleapis/python-logging/commit/b60714cb1cc3aac79c86225f8f9cbd24d8ab170f))
+* Changed table in web-framework-integration to bulleted list ([#875](https://github.com/googleapis/python-logging/issues/875)) ([a4aa3a7](https://github.com/googleapis/python-logging/commit/a4aa3a7cf1e3bb32ec2772084a7dc6c16e1454ff))
+* Documentation update for OpenTelemetry ([#915](https://github.com/googleapis/python-logging/issues/915)) ([2a0539a](https://github.com/googleapis/python-logging/commit/2a0539a30e6dcf45c0970e3aacfd4a2772877526))
+* Update `dictConfig` snippet ([#885](https://github.com/googleapis/python-logging/issues/885)) ([6264107](https://github.com/googleapis/python-logging/commit/62641075042a3da9bb9c059d963bad14a1586b1c))
+
+## [3.10.0](https://github.com/googleapis/python-logging/compare/v3.9.0...v3.10.0) (2024-03-13)
+
+
+### Features
+
+* Allow users to explicitly configure universe domain ([#846](https://github.com/googleapis/python-logging/issues/846)) ([e998a21](https://github.com/googleapis/python-logging/commit/e998a219740cf8b2373e462867244a6860b0c88c))
+
+
+### Bug Fixes
+
+* Added placeholder kwargs to StructuredLogHandler ([#845](https://github.com/googleapis/python-logging/issues/845)) ([9bc0a37](https://github.com/googleapis/python-logging/commit/9bc0a37d910340d828db8bab33d67785f184f00c))
+* Allowed for a partial override of loggers that get excluded from setup_client ([#831](https://github.com/googleapis/python-logging/issues/831)) ([870c940](https://github.com/googleapis/python-logging/commit/870c9403e03d31a0f22dddc257cd5fb2b4fc5ee3))
+* Remove usage in including_default_value_fields to prepare for protobuf 5.x ([#866](https://github.com/googleapis/python-logging/issues/866)) ([66a534d](https://github.com/googleapis/python-logging/commit/66a534d1b83d7c63f5c7b013bf27ed54dd2786c3))
+* Use value of cluster-location in GKE for tagging location ([#830](https://github.com/googleapis/python-logging/issues/830)) ([c15847c](https://github.com/googleapis/python-logging/commit/c15847c215c18ad3970efba12f5d337e6d499883))
+
+
+### Documentation
+
+* Added documentation for Django/Flask integrations and dictConfig ([#848](https://github.com/googleapis/python-logging/issues/848)) ([c65ec92](https://github.com/googleapis/python-logging/commit/c65ec92bf348e2bcdd8f4c5bacc152cfb4737eb1))
+
+## [3.9.0](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0) (2023-12-08)
+
+
+### Features
+
+* Add support for Python 3.12 ([#813](https://github.com/googleapis/python-logging/issues/813)) ([6591b53](https://github.com/googleapis/python-logging/commit/6591b53e3fcd67e156765f329700443647b70349))
+* Use native namespaces instead of pkg_resources ([#812](https://github.com/googleapis/python-logging/issues/812)) ([10ad75d](https://github.com/googleapis/python-logging/commit/10ad75d2b9276df389f5069f9f143f8f4621d04d))
+
+
+### Bug Fixes
+
+* Fixed DeprecationWarning for datetime objects for Python 3.12 ([#824](https://github.com/googleapis/python-logging/issues/824)) ([2384981](https://github.com/googleapis/python-logging/commit/2384981c9137a57a647a69a32b67dcacd619ea0a))
+* Fixed object paths in autogenerated code in owlbot.py ([#804](https://github.com/googleapis/python-logging/issues/804)) ([b14bb14](https://github.com/googleapis/python-logging/commit/b14bb144fad2dcf067b7e62e402b708f45ebadbe))
+* Updated protobuf JSON formatting to support nested protobufs ([#797](https://github.com/googleapis/python-logging/issues/797)) ([a00c261](https://github.com/googleapis/python-logging/commit/a00c261ee07a5dcaac9f5b966b4bb6729a2bbe65))
+* Use `retry_async` instead of `retry` in async client ([#816](https://github.com/googleapis/python-logging/issues/816)) ([c79f7f5](https://github.com/googleapis/python-logging/commit/c79f7f55dddb170eac29f24b23bfe1dde8bfbda8))
+* Use warning instead of warn in system tests to avoid DeprecationWarning ([#821](https://github.com/googleapis/python-logging/issues/821)) ([c447175](https://github.com/googleapis/python-logging/commit/c4471758e1efee0e3599b08969449b2ce71bd1b4))
+
+## [3.8.0](https://github.com/googleapis/python-logging/compare/v3.7.0...v3.8.0) (2023-10-03)
+
+
+### Features
+
+* Add cloud_run_job monitored resource type. ([#788](https://github.com/googleapis/python-logging/issues/788)) ([3b310d6](https://github.com/googleapis/python-logging/commit/3b310d68b68df5bb31e21ac30b23207ef50c3f6f))
+
+## [3.7.0](https://github.com/googleapis/python-logging/compare/v3.6.0...v3.7.0) (2023-09-25)
+
+
+### Features
+
+* Add ConfigServiceV2.CreateBucketAsync method for creating Log Buckets asynchronously ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add ConfigServiceV2.CreateLink method for creating linked datasets for Log Analytics Buckets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add ConfigServiceV2.DeleteLink method for deleting linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add ConfigServiceV2.GetLink methods for describing linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add ConfigServiceV2.ListLinks method for listing linked datasets ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add ConfigServiceV2.UpdateBucketAsync method for updating Log Buckets asynchronously ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add LogBucket.analytics_enabled field that specifies whether Log Bucket's Analytics features are enabled ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add LogBucket.index_configs field that contains a list of Log Bucket's indexed fields and related configuration data ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Log Analytics features of the Cloud Logging API ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+
+
+### Bug Fixes
+
+* Add async context manager return types ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Add severity to structured log write ([#783](https://github.com/googleapis/python-logging/issues/783)) ([31a7f69](https://github.com/googleapis/python-logging/commit/31a7f69ed94719546136a3bf1b3ecdb28e369414))
+* Handle exceptions raised when fetching Django request data ([#758](https://github.com/googleapis/python-logging/issues/758)) ([5ecf886](https://github.com/googleapis/python-logging/commit/5ecf88606b4f29b00ff8b18ae71c151d203d5c3b))
+* Unintended exception omission ([#736](https://github.com/googleapis/python-logging/issues/736)) ([022dc54](https://github.com/googleapis/python-logging/commit/022dc545f781648043296b3ca04d835fcb6f1d7e))
+
+
+### Documentation
+
+* Documentation for the Log Analytics features of the Cloud Logging API ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+* Minor formatting ([30f24a8](https://github.com/googleapis/python-logging/commit/30f24a8bb3b0be2511264a18c3c93bdd3996fc93))
+
## [3.6.0](https://github.com/googleapis/python-logging/compare/v3.5.0...v3.6.0) (2023-07-05)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 6fa7a4dac..7bbacd5ca 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.11 -- -k
+ $ nox -s unit-3.13 -- -k
.. note::
@@ -143,12 +143,12 @@ Running System Tests
$ nox -s system
# Run a single system test
- $ nox -s system-3.8 -- -k
+ $ nox -s system-3.12 -- -k
.. note::
- System tests are only configured to run under Python 3.8.
+ System tests are only configured to run under Python 3.12.
For expediency, we do not run them in older versions of Python 3.
This alone will not run the tests. You'll need to change some local
@@ -226,12 +226,16 @@ We support:
- `Python 3.9`_
- `Python 3.10`_
- `Python 3.11`_
+- `Python 3.12`_
+- `Python 3.13`_
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
+.. _Python 3.12: https://docs.python.org/3.12/
+.. _Python 3.13: https://docs.python.org/3.13/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/MANIFEST.in b/MANIFEST.in
index e0a667053..d6814cd60 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/README.rst b/README.rst
index 2618dc37a..d9549ed7d 100644
--- a/README.rst
+++ b/README.rst
@@ -14,7 +14,7 @@ Logging configuration.
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg
:target: https://pypi.org/project/google-cloud-logging/
.. _Cloud Logging API: https://cloud.google.com/logging
-.. _Client Library Documentation: https://googleapis.dev/python/logging/latest
+.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/logging/latest/summary_overview
.. _Product Documentation: https://cloud.google.com/logging/docs
.. _Setting Up Cloud Logging for Python: https://cloud.google.com/logging/docs/setup/python
.. _Python's standard logging library: https://docs.python.org/2/library/logging.html
@@ -61,8 +61,8 @@ Python >= 3.7
Unsupported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Python == 2.7. The last version of the library compatible with Python 2.7 is `google-cloud-logging==1.15.1`.
-Python == 3.6. The last version of the library compatible with Python 3.6 is `google-cloud-logging==3.1.2`.
+| Python == 2.7. The last version of the library compatible with Python 2.7 is ``google-cloud-logging==1.15.1``.
+| Python == 3.6. The last version of the library compatible with Python 3.6 is ``google-cloud-logging==3.1.2``.
Mac/Linux
diff --git a/docs/auto-trace-span-extraction.rst b/docs/auto-trace-span-extraction.rst
new file mode 100644
index 000000000..1eb21fb78
--- /dev/null
+++ b/docs/auto-trace-span-extraction.rst
@@ -0,0 +1,27 @@
+Automatic Trace/Span ID Extraction
+==================================
+
+.. note::
+ All `LogEntry fields`_ populated :ref:`manually` will override those populated via methods referred to in this
+ section.
+
+The Google Cloud Logging library can automatically populate `LogEntry fields`_
+`trace`, `span_id`, and `trace_sampled` via OpenTelemetry integration, or extracting header information from an HTTP request.
+
+OpenTelemetry Integration
+-------------------------
+
+If you have the OpenTelemetry SDK package installed and are logging from within an active OpenTelemetry span, that log entry will automatically
+have the `trace`, `span_id`, and `trace_sampled` fields populated from that span. More information about OpenTelemetry can be found
+`here `_.
+
+HTTP headers
+------------
+
+Another possible method of automatic `trace` / `span_id` population is via extraction from HTTP headers.
+This is prioritized after OpenTelemetry and requires a :doc:`supported Python web framework `.
+Trace information is automatically populated from either the `W3C Traceparent `_
+or `X-Cloud-Trace-Context `_ headers.
+Populating trace information this way also automatically populates the `http_request` field in the `LogEntry` as well.
+
+.. _LogEntry fields: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry
diff --git a/docs/conf.py b/docs/conf.py
index fffea8f16..a65cf85ff 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/docs/index.rst b/docs/index.rst
index 01d8e4eee..08f049c16 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -41,3 +41,8 @@ For a list of all ``google-cloud-logging`` releases:
:maxdepth: 2
changelog
+
+.. toctree::
+ :hidden:
+
+ summary_overview.md
diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst
index a485fce6d..cf00c37ae 100644
--- a/docs/std-lib-integration.rst
+++ b/docs/std-lib-integration.rst
@@ -16,6 +16,21 @@ call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~
:end-before: [END logging_handler_setup]
:dedent: 4
+
+You can also set the logging level threshold of the logging handler created by :meth:`~google.cloud.logging_v2.client.Client.setup_logging`,
+as well as set loggers excluded from the logger that is created:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logging_setup_logging]
+ :end-before: [END logging_setup_logging]
+ :dedent: 4
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logging_setup_logging_excludes]
+ :end-before: [END logging_setup_logging_excludes]
+ :dedent: 4
+
+
This :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configurations for the environment your
code is running on. For more information, see the `Google Cloud Logging documentation `_.
@@ -44,6 +59,16 @@ There are two supported handler classes to choose from:
to standard out, to be read and parsed by a GCP logging agent
- This is the default handler on Kubernetes Engine, Cloud Functions and Cloud Run
+Handler classes can also be specified via `dictConfig `_:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+ :start-after: [START logging_dict_config]
+ :end-before: [END logging_dict_config]
+ :dedent: 4
+
+Note that since :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler` requires an already initialized :class:`~google.cloud.logging_v2.client.Client`,
+you must initialize a client and include it in the dictConfig entry for a `CloudLoggingHandler`.
+
Standard Library
---------------------------
@@ -92,32 +117,35 @@ The Google Cloud Logging library attempts to detect and attach additional
The following fields are currently supported:
- labels
-- trace*
-- span_id*
-- trace_sampled*
-- http_request*
+- trace
+- span_id
+- trace_sampled
+- http_request
- source_location
- resource
- :ref:`json_fields`
.. note::
- Fields marked with "*" require a supported Python web framework. The Google Cloud Logging
- library currently supports `flask `_ and `django `_
+ | More information about `trace`, `span_id`, and `trace_sampled` can be found :doc:`here `.
+ | `http_request` requires a :doc:`supported Python web framework `.
+
Manual Metadata Using the `extra` Argument
--------------------------------------------
+.. _Manual-Metadata:
+
The Python :mod:`logging` standard library accepts `an "extra" argument `_ when
writing logs. You can use this argument to populate LogRecord objects with user-defined
key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional
-metadata to populate `LogEntry fields `_.
+metadata to populate `LogEntry fields`_.
.. literalinclude:: ../samples/snippets/usage_guide.py
:start-after: [START logging_extras]
:end-before: [END logging_extras]
:dedent: 4
-All of the `LogEntry fields `_
+All of the `LogEntry fields`_
that can be :ref:`autodetected` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra`
argument override any :ref:`automatically detected` fields.
@@ -144,3 +172,5 @@ You can use both transport options over :doc:`gRPC or HTTP`.
.. note::
:class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`
prints logs as formatted JSON to standard output, and does not use a Transport class.
+
+.. _LogEntry fields: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry
\ No newline at end of file
diff --git a/docs/summary_overview.md b/docs/summary_overview.md
new file mode 100644
index 000000000..4786fbcaa
--- /dev/null
+++ b/docs/summary_overview.md
@@ -0,0 +1,22 @@
+[
+This is a templated file. Adding content to this file may result in it being
+reverted. Instead, if you want to place additional content, create an
+"overview_content.md" file in `docs/` directory. The Sphinx tool will
+pick up on the content and merge the content.
+]: #
+
+# Cloud Logging API
+
+Overview of the APIs available for Cloud Logging API.
+
+## All entries
+
+Classes, methods and properties & attributes for
+Cloud Logging API.
+
+[classes](https://cloud.google.com/python/docs/reference/logging/latest/summary_class.html)
+
+[methods](https://cloud.google.com/python/docs/reference/logging/latest/summary_method.html)
+
+[properties and
+attributes](https://cloud.google.com/python/docs/reference/logging/latest/summary_property.html)
diff --git a/docs/usage.rst b/docs/usage.rst
index 929ee9cef..c28be0c6f 100644
--- a/docs/usage.rst
+++ b/docs/usage.rst
@@ -4,6 +4,8 @@ Usage Guide
:maxdepth: 2
std-lib-integration
+ auto-trace-span-extraction
+ web-framework-integration
direct-lib-usage
grpc-vs-http
diff --git a/docs/web-framework-integration.rst b/docs/web-framework-integration.rst
new file mode 100644
index 000000000..d7bc3229d
--- /dev/null
+++ b/docs/web-framework-integration.rst
@@ -0,0 +1,29 @@
+Integration with Python Web Frameworks
+======================================
+
+The Google Cloud Logging library can integrate with Python web frameworks
+`flask `_ and `django `_ to
+automatically populate `LogEntry fields `_
+`trace`, `span_id`, `trace_sampled`, and `http_request`.
+
+Django
+------
+
+Django integration has been tested to work with each of the Django/Python versions listed `here `_.
+To enable Django integration, add `google.cloud.logging_v2.handlers.middleware.RequestMiddleware` to the list of `MIDDLEWARE`
+in your `settings `_ file. Also be sure to :doc:`set up logging ` in your settings file.
+
+Flask
+-----
+
+Flask integration has been tested to work with the following versions of Flask:
+
+- Python 3.7 - 3.9:
+
+ - Flask >=1.0.0
+
+- Python >=3.10:
+
+ - Flask >=1.0.3
+
+Be sure to :doc:`set up logging ` before declaring the Flask app.
diff --git a/google/__init__.py b/google/__init__.py
deleted file mode 100644
index 0e1bc5131..000000000
--- a/google/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
-
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/google/cloud/__init__.py b/google/cloud/__init__.py
deleted file mode 100644
index 0e1bc5131..000000000
--- a/google/cloud/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
- import pkg_resources
-
- pkg_resources.declare_namespace(__name__)
-except ImportError:
- import pkgutil
-
- __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/google/cloud/logging/gapic_version.py b/google/cloud/logging/gapic_version.py
index d29522314..14833215c 100644
--- a/google/cloud/logging/gapic_version.py
+++ b/google/cloud/logging/gapic_version.py
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "3.6.0" # {x-release-please-version}
+__version__ = "3.12.1" # {x-release-please-version}
diff --git a/google/cloud/logging_v2/__init__.py b/google/cloud/logging_v2/__init__.py
index 9860f1e06..fac0b7d02 100644
--- a/google/cloud/logging_v2/__init__.py
+++ b/google/cloud/logging_v2/__init__.py
@@ -36,7 +36,7 @@
ASCENDING = "timestamp asc"
"""Query string to order by ascending timestamps."""
DESCENDING = "timestamp desc"
-"""Query string to order by decending timestamps."""
+"""Query string to order by descending timestamps."""
_instrumentation_emitted = False
"""Flag for whether instrumentation info has been emitted"""
diff --git a/google/cloud/logging_v2/_gapic.py b/google/cloud/logging_v2/_gapic.py
index f6f6dca1f..379665248 100644
--- a/google/cloud/logging_v2/_gapic.py
+++ b/google/cloud/logging_v2/_gapic.py
@@ -30,6 +30,7 @@
from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import ParseDict
+from google.protobuf.json_format import ParseError
from google.cloud.logging_v2._helpers import entry_from_resource
from google.cloud.logging_v2.sink import Sink
@@ -151,7 +152,10 @@ def write_entries(
Useful for checking whether the logging API endpoints are working
properly before sending valuable data.
"""
- log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries]
+ try:
+ log_entry_pbs = [_log_entry_mapping_to_pb(entry) for entry in entries]
+ except ParseError as e:
+ raise ValueError(f"Invalid log entry: {str(e)}") from e
request = WriteLogEntriesRequest(
log_name=logger_name,
@@ -271,7 +275,6 @@ def sink_create(
return MessageToDict(
LogSink.pb(created_pb),
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
def sink_get(self, sink_name):
@@ -298,7 +301,6 @@ def sink_get(self, sink_name):
return MessageToDict(
LogSink.pb(sink_pb),
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
def sink_update(
@@ -333,7 +335,7 @@ def sink_update(
dict: The sink resource returned from the API (converted from a
protobuf to a dictionary).
"""
- name = sink_name.split("/")[-1] # parse name out of full resoure name
+ name = sink_name.split("/")[-1] # parse name out of full resource name
sink_pb = LogSink(
name=name,
filter=filter_,
@@ -351,7 +353,6 @@ def sink_update(
return MessageToDict(
LogSink.pb(sink_pb),
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
def sink_delete(self, sink_name):
@@ -459,7 +460,6 @@ def metric_get(self, project, metric_name):
return MessageToDict(
LogMetric.pb(metric_pb),
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
def metric_update(
@@ -496,7 +496,6 @@ def metric_update(
return MessageToDict(
LogMetric.pb(metric_pb),
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
def metric_delete(self, project, metric_name):
@@ -530,7 +529,6 @@ def _parse_log_entry(entry_pb):
return MessageToDict(
entry_pb,
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
except TypeError:
if entry_pb.HasField("proto_payload"):
@@ -539,7 +537,6 @@ def _parse_log_entry(entry_pb):
entry_mapping = MessageToDict(
entry_pb,
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
entry_mapping["protoPayload"] = proto_payload
return entry_mapping
diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py
index 581dce35e..c629b8d92 100644
--- a/google/cloud/logging_v2/_http.py
+++ b/google/cloud/logging_v2/_http.py
@@ -26,7 +26,6 @@
class Connection(_http.JSONConnection):
-
DEFAULT_API_ENDPOINT = "https://logging.googleapis.com"
def __init__(self, client, *, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT):
@@ -348,7 +347,7 @@ def sink_update(
dict: The returned (updated) resource.
"""
target = f"/{sink_name}"
- name = sink_name.split("/")[-1] # parse name out of full resoure name
+ name = sink_name.split("/")[-1] # parse name out of full resource name
data = {"name": name, "filter": filter_, "destination": destination}
query_params = {"uniqueWriterIdentity": unique_writer_identity}
return self.api_request(
diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py
index 94c1e6ca7..f52845ee5 100644
--- a/google/cloud/logging_v2/client.py
+++ b/google/cloud/logging_v2/client.py
@@ -149,6 +149,8 @@ def __init__(
else:
self._use_grpc = _use_grpc
+ self._handlers = set()
+
@property
def logging_api(self):
"""Helper for logging-related API calls.
@@ -400,7 +402,8 @@ def setup_logging(
loggers, will report to Cloud Logging.
Args:
- log_level (Optional[int]): Python logging log level. Defaults to
+ log_level (Optional[int]): The logging level threshold of the attached logger,
+ as set by the :meth:`logging.Logger.setLevel` method. Defaults to
:const:`logging.INFO`.
excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the
handler to. This will always include the
@@ -410,4 +413,17 @@ def setup_logging(
dict: keyword args passed to handler constructor
"""
handler = self.get_default_handler(**kw)
+ self._handlers.add(handler)
setup_logging(handler, log_level=log_level, excluded_loggers=excluded_loggers)
+
+ def flush_handlers(self):
+ """Flushes all Python log handlers associated with this Client."""
+
+ for handler in self._handlers:
+ handler.flush()
+
+ def close(self):
+ """Closes the Client and all handlers associated with this Client."""
+ super(Client, self).close()
+ for handler in self._handlers:
+ handler.close()
diff --git a/google/cloud/logging_v2/entries.py b/google/cloud/logging_v2/entries.py
index 9db020f67..d8a877738 100644
--- a/google/cloud/logging_v2/entries.py
+++ b/google/cloud/logging_v2/entries.py
@@ -18,9 +18,9 @@
import json
import re
-from google.protobuf.any_pb2 import Any
from google.protobuf.json_format import MessageToDict
from google.protobuf.json_format import Parse
+from google.protobuf.message import Message
from google.cloud.logging_v2.resource import Resource
from google.cloud._helpers import _name_from_project_path
@@ -325,7 +325,7 @@ def _extract_payload(cls, resource):
@property
def payload_pb(self):
- if isinstance(self.payload, Any):
+ if isinstance(self.payload, Message):
return self.payload
@property
@@ -337,10 +337,10 @@ def to_api_repr(self):
"""API repr (JSON format) for entry."""
info = super(ProtobufEntry, self).to_api_repr()
proto_payload = None
- if self.payload_json:
- proto_payload = dict(self.payload_json)
- elif self.payload_pb:
- proto_payload = MessageToDict(self.payload_pb)
+ if self.payload_pb:
+ proto_payload = MessageToDict(self.payload)
+ elif self.payload_json:
+ proto_payload = dict(self.payload)
info["protoPayload"] = proto_payload
return info
diff --git a/google/cloud/logging_v2/gapic_version.py b/google/cloud/logging_v2/gapic_version.py
index d29522314..14833215c 100644
--- a/google/cloud/logging_v2/gapic_version.py
+++ b/google/cloud/logging_v2/gapic_version.py
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "3.6.0" # {x-release-please-version}
+__version__ = "3.12.1" # {x-release-please-version}
diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py
index 32e70dfdd..ff5838e05 100644
--- a/google/cloud/logging_v2/handlers/_helpers.py
+++ b/google/cloud/logging_v2/handlers/_helpers.py
@@ -24,6 +24,8 @@
except ImportError: # pragma: NO COVER
flask = None
+import opentelemetry.trace
+
from google.cloud.logging_v2.handlers.middleware.request import _get_django_request
_DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH"
@@ -66,7 +68,7 @@ def get_request_data_from_flask():
Returns:
Tuple[Optional[dict], Optional[str], Optional[str], bool]:
Data related to the current http request, trace_id, span_id and trace_sampled
- for the request. All fields will be None if a django request isn't found.
+ for the request. All fields will be None if a Flask request isn't found.
"""
if flask is None or not flask.request:
return None, None, None, False
@@ -104,10 +106,17 @@ def get_request_data_from_django():
if request is None:
return None, None, None, False
+ # Django can raise django.core.exceptions.DisallowedHost here for a
+ # malformed HTTP_HOST header. But we don't want to import Django modules.
+ try:
+ request_url = request.build_absolute_uri()
+ except Exception:
+ request_url = None
+
# build http_request
http_request = {
"requestMethod": request.method,
- "requestUrl": request.build_absolute_uri(),
+ "requestUrl": request_url,
"userAgent": request.META.get(_DJANGO_USERAGENT_HEADER),
"protocol": request.META.get(_PROTOCOL_HEADER),
}
@@ -165,13 +174,22 @@ def _parse_xcloud_trace(header):
Args:
header (str): the string extracted from the X_CLOUD_TRACE header
Returns:
- Tuple[Optional[dict], Optional[str], bool]:
+ Tuple[Optional[str], Optional[str], bool]:
The trace_id, span_id and trace_sampled extracted from the header
Each field will be None if not found.
"""
trace_id = span_id = None
trace_sampled = False
- # see https://cloud.google.com/trace/docs/setup for X-Cloud-Trace_Context format
+
+ # As per the format described at https://cloud.google.com/trace/docs/trace-context#legacy-http-header
+ # "X-Cloud-Trace-Context: TRACE_ID[/SPAN_ID][;o=OPTIONS]"
+ # for example:
+ # "X-Cloud-Trace-Context: 105445aa7843bc8bf206b12000100000/1;o=1"
+ #
+ # We expect:
+ # * trace_id (optional, 128-bit hex string): "105445aa7843bc8bf206b12000100000"
+ # * span_id (optional, 16-character hex string): "0000000000000001" (decimal span IDs need to be converted into 16-character hex strings)
+ # * trace_sampled (optional, bool): true
if header:
try:
regex = r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?"
@@ -179,28 +197,87 @@ def _parse_xcloud_trace(header):
trace_id = match.group(1)
span_id = match.group(3)
trace_sampled = match.group(5) == "1"
+
+ # Convert the span ID to a 16-character hexadecimal string instead of decimal
+ try:
+ span_id_int = int(span_id)
+ if span_id_int > 0 and span_id_int < 2**64:
+ span_id = f"{span_id_int:016x}"
+ else:
+ span_id = None
+ except (ValueError, TypeError):
+ span_id = None
+
except IndexError:
pass
return trace_id, span_id, trace_sampled
+def _retrieve_current_open_telemetry_span():
+ """Helper to retrieve trace, span ID, and trace sampled information from the current
+ OpenTelemetry span.
+
+ Returns:
+ Tuple[Optional[str], Optional[str], bool]:
+ Data related to the current trace_id, span_id, and trace_sampled for the
+ current OpenTelemetry span. If a span is not found, return None/False for all
+ fields.
+ """
+ span = opentelemetry.trace.get_current_span()
+ if span != opentelemetry.trace.span.INVALID_SPAN:
+ context = span.get_span_context()
+ trace_id = opentelemetry.trace.format_trace_id(context.trace_id)
+ span_id = opentelemetry.trace.format_span_id(context.span_id)
+ trace_sampled = context.trace_flags.sampled
+
+ return trace_id, span_id, trace_sampled
+
+ return None, None, False
+
+
def get_request_data():
"""Helper to get http_request and trace data from supported web
- frameworks (currently supported: Flask and Django).
+ frameworks (currently supported: Flask and Django), as well as OpenTelemetry. Attempts
+ to retrieve trace/spanID from OpenTelemetry first, before going to Traceparent then XCTC.
+ HTTP request data is taken from a supporting web framework (currently Flask or Django).
+ Because HTTP request data is decoupled from OpenTelemetry, it is possible to get as a
+ return value the HTTP request from the web framework of choice, and trace/span data from
+ OpenTelemetry, even if trace data is present in the HTTP request headers.
Returns:
Tuple[Optional[dict], Optional[str], Optional[str], bool]:
Data related to the current http request, trace_id, span_id, and trace_sampled
for the request. All fields will be None if a http request isn't found.
"""
+
+ (
+ otel_trace_id,
+ otel_span_id,
+ otel_trace_sampled,
+ ) = _retrieve_current_open_telemetry_span()
+
+ # Get HTTP request data
checkers = (
get_request_data_from_django,
get_request_data_from_flask,
)
- for checker in checkers:
- http_request, trace_id, span_id, trace_sampled = checker()
- if http_request is not None:
- return http_request, trace_id, span_id, trace_sampled
+ http_request, http_trace_id, http_span_id, http_trace_sampled = (
+ None,
+ None,
+ None,
+ False,
+ )
- return None, None, None, False
+ for checker in checkers:
+ http_request, http_trace_id, http_span_id, http_trace_sampled = checker()
+ if http_request is None:
+ http_trace_id, http_span_id, http_trace_sampled = None, None, False
+ else:
+ break
+
+ # otel_trace_id existing means the other return values are non-null
+ if otel_trace_id:
+ return http_request, otel_trace_id, otel_span_id, otel_trace_sampled
+ else:
+ return http_request, http_trace_id, http_span_id, http_trace_sampled
diff --git a/google/cloud/logging_v2/handlers/_monitored_resources.py b/google/cloud/logging_v2/handlers/_monitored_resources.py
index a5b8dfee3..5240fe746 100644
--- a/google/cloud/logging_v2/handlers/_monitored_resources.py
+++ b/google/cloud/logging_v2/handlers/_monitored_resources.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import functools
+import logging
import os
from google.cloud.logging_v2.resource import Resource
@@ -26,11 +28,21 @@
_CLOUD_RUN_SERVICE_ID = "K_SERVICE"
_CLOUD_RUN_REVISION_ID = "K_REVISION"
_CLOUD_RUN_CONFIGURATION_ID = "K_CONFIGURATION"
-_CLOUD_RUN_ENV_VARS = [
+_CLOUD_RUN_SERVICE_ENV_VARS = [
_CLOUD_RUN_SERVICE_ID,
_CLOUD_RUN_REVISION_ID,
_CLOUD_RUN_CONFIGURATION_ID,
]
+_CLOUD_RUN_JOB_ID = "CLOUD_RUN_JOB"
+_CLOUD_RUN_EXECUTION_ID = "CLOUD_RUN_EXECUTION"
+_CLOUD_RUN_TASK_INDEX = "CLOUD_RUN_TASK_INDEX"
+_CLOUD_RUN_TASK_ATTEMPT = "CLOUD_RUN_TASK_ATTEMPT"
+_CLOUD_RUN_JOB_ENV_VARS = [
+ _CLOUD_RUN_JOB_ID,
+ _CLOUD_RUN_EXECUTION_ID,
+ _CLOUD_RUN_TASK_INDEX,
+ _CLOUD_RUN_TASK_ATTEMPT,
+]
"""Environment variables set in Cloud Run environment."""
_FUNCTION_TARGET = "FUNCTION_TARGET"
@@ -51,9 +63,26 @@
_GKE_CLUSTER_NAME = "instance/attributes/cluster-name"
"""Attribute in metadata server when in GKE environment."""
+_GKE_CLUSTER_LOCATION = "instance/attributes/cluster-location"
+"""Attribute in metadata server when in GKE environment."""
+
_PROJECT_NAME = "project/project-id"
"""Attribute in metadata server when in GKE environment."""
+_GAE_RESOURCE_TYPE = "gae_app"
+"""Resource type for App Engine environment."""
+
+_CLOUD_RUN_JOB_RESOURCE_TYPE = "cloud_run_job"
+"""Resource type for Cloud Run Jobs."""
+
+_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id"
+"""Extra trace label to be added on App Engine environments"""
+
+_CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL = "run.googleapis.com/execution_name"
+_CLOUD_RUN_JOBS_TASK_INDEX_LABEL = "run.googleapis.com/task_index"
+_CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL = "run.googleapis.com/task_attempt"
+"""Extra labels for Cloud Run environments to be recognized by Cloud Run Jobs web UI."""
+
def _create_functions_resource():
"""Create a standardized Cloud Functions resource.
@@ -84,7 +113,7 @@ def _create_kubernetes_resource():
Returns:
google.cloud.logging.Resource
"""
- zone = retrieve_metadata_server(_ZONE_ID)
+ location = retrieve_metadata_server(_GKE_CLUSTER_LOCATION)
cluster_name = retrieve_metadata_server(_GKE_CLUSTER_NAME)
project = retrieve_metadata_server(_PROJECT_NAME)
@@ -92,7 +121,7 @@ def _create_kubernetes_resource():
type="k8s_container",
labels={
"project_id": project if project else "",
- "location": zone if zone else "",
+ "location": location if location else "",
"cluster_name": cluster_name if cluster_name else "",
},
)
@@ -118,8 +147,8 @@ def _create_compute_resource():
return resource
-def _create_cloud_run_resource():
- """Create a standardized Cloud Run resource.
+def _create_cloud_run_service_resource():
+ """Create a standardized Cloud Run service resource.
Returns:
google.cloud.logging.Resource
"""
@@ -138,6 +167,24 @@ def _create_cloud_run_resource():
return resource
+def _create_cloud_run_job_resource():
+ """Create a standardized Cloud Run job resource.
+ Returns:
+ google.cloud.logging.Resource
+ """
+ region = retrieve_metadata_server(_REGION_ID)
+ project = retrieve_metadata_server(_PROJECT_NAME)
+ resource = Resource(
+ type=_CLOUD_RUN_JOB_RESOURCE_TYPE,
+ labels={
+ "project_id": project if project else "",
+ "job_name": os.environ.get(_CLOUD_RUN_JOB_ID, ""),
+ "location": region.split("/")[-1] if region else "",
+ },
+ )
+ return resource
+
+
def _create_app_engine_resource():
"""Create a standardized App Engine resource.
Returns:
@@ -146,7 +193,7 @@ def _create_app_engine_resource():
zone = retrieve_metadata_server(_ZONE_ID)
project = retrieve_metadata_server(_PROJECT_NAME)
resource = Resource(
- type="gae_app",
+ type=_GAE_RESOURCE_TYPE,
labels={
"project_id": project if project else "",
"module_id": os.environ.get(_GAE_SERVICE_ENV, ""),
@@ -190,12 +237,67 @@ def detect_resource(project=""):
):
# Cloud Functions
return _create_functions_resource()
- elif all([env in os.environ for env in _CLOUD_RUN_ENV_VARS]):
+ elif all([env in os.environ for env in _CLOUD_RUN_SERVICE_ENV_VARS]):
+ # Cloud Run
+ return _create_cloud_run_service_resource()
+ elif all([env in os.environ for env in _CLOUD_RUN_JOB_ENV_VARS]):
# Cloud Run
- return _create_cloud_run_resource()
+ return _create_cloud_run_job_resource()
elif gce_instance_name is not None:
# Compute Engine
return _create_compute_resource()
else:
# use generic global resource
return _create_global_resource(project)
+
+
+@functools.lru_cache(maxsize=None)
+def _get_environmental_labels(resource_type):
+ """Builds a dictionary of labels to be inserted into a LogRecord of the given resource type.
+ This function should only build a dict of items that are consistent across multiple logging statements
+ of the same resource type, such as environment variables.
+
+ Returns:
+ dict:
+ A dict representation of labels and the values of those labels
+ """
+ labels = {}
+ environ_vars = {
+ _CLOUD_RUN_JOB_RESOURCE_TYPE: {
+ _CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL: _CLOUD_RUN_EXECUTION_ID,
+ _CLOUD_RUN_JOBS_TASK_INDEX_LABEL: _CLOUD_RUN_TASK_INDEX,
+ _CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL: _CLOUD_RUN_TASK_ATTEMPT,
+ }
+ }
+
+ if resource_type in environ_vars:
+ for key, env_var in environ_vars[resource_type].items():
+ val = os.environ.get(env_var, "")
+ if val:
+ labels[key] = val
+
+ return labels
+
+
+def add_resource_labels(resource: Resource, record: logging.LogRecord):
+ """Returns additional labels to be appended on to a LogRecord object based on the
+ local environment. Defaults to an empty dictionary if none apply. This is only to be
+ used for CloudLoggingHandler, as the structured logging daemon already does this.
+
+ Args:
+ resource (google.cloud.logging.Resource): Resource based on the environment
+ record (logging.LogRecord): A LogRecord object representing a log record
+ Returns:
+ Dict[str, str]: New labels to append to the labels of the LogRecord
+ """
+ if not resource:
+ return None
+
+ # Get environmental labels from the resource type
+ labels = _get_environmental_labels(resource.type)
+
+ # Add labels from log record
+ if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None:
+ labels[_GAE_TRACE_ID_LABEL] = record._trace
+
+ return labels
diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py
index 28960ae71..233d9eab3 100644
--- a/google/cloud/logging_v2/handlers/handlers.py
+++ b/google/cloud/logging_v2/handlers/handlers.py
@@ -18,30 +18,38 @@
import json
import logging
-from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport
-from google.cloud.logging_v2.handlers._monitored_resources import detect_resource
+from typing import Optional, IO, Type
+
+from google.cloud.logging_v2.handlers.transports import (
+ BackgroundThreadTransport,
+ Transport,
+)
+from google.cloud.logging_v2.handlers._monitored_resources import (
+ detect_resource,
+ add_resource_labels,
+)
from google.cloud.logging_v2.handlers._helpers import get_request_data
+from google.cloud.logging_v2.resource import Resource
+
DEFAULT_LOGGER_NAME = "python"
-"""Exclude internal logs from propagating through handlers"""
+"""Defaults for filtering out noisy loggers"""
EXCLUDED_LOGGER_DEFAULTS = (
+ "google.api_core.bidi",
+ "werkzeug",
+)
+
+"""Exclude internal logs from propagating through handlers"""
+_INTERNAL_LOGGERS = (
"google.cloud",
"google.auth",
"google_auth_httplib2",
- "google.api_core.bidi",
- "werkzeug",
)
"""These environments require us to remove extra handlers on setup"""
_CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function")
-"""Extra trace label to be added on App Engine environments"""
-_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id"
-
-"""Resource name for App Engine environments"""
-_GAE_RESOURCE_TYPE = "gae_app"
-
class CloudLoggingFilter(logging.Filter):
"""Python standard ``logging`` Filter class to add Cloud Logging
@@ -70,7 +78,7 @@ def _infer_source_location(record):
("function", "funcName"),
]
output = {}
- for (gcp_name, std_lib_name) in name_map:
+ for gcp_name, std_lib_name in name_map:
value = getattr(record, std_lib_name, None)
if value is not None:
output[gcp_name] = value
@@ -148,11 +156,12 @@ def __init__(
self,
client,
*,
- name=DEFAULT_LOGGER_NAME,
- transport=BackgroundThreadTransport,
- resource=None,
- labels=None,
- stream=None,
+ name: str = DEFAULT_LOGGER_NAME,
+ transport: Type[Transport] = BackgroundThreadTransport,
+ resource: Resource = None,
+ labels: Optional[dict] = None,
+ stream: Optional[IO] = None,
+ **kwargs,
):
"""
Args:
@@ -179,7 +188,10 @@ def __init__(
resource = detect_resource(client.project)
self.name = name
self.client = client
+ client._handlers.add(self)
self.transport = transport(client, name, resource=resource)
+ self._transport_open = True
+ self._transport_cls = transport
self.project_id = client.project
self.resource = resource
self.labels = labels
@@ -201,10 +213,15 @@ def emit(self, record):
labels = record._labels
message = _format_and_parse_message(record, self)
- if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None:
- # add GAE-specific label
- labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})}
+ labels = {**add_resource_labels(resource, record), **(labels or {})} or None
+
# send off request
+ if not self._transport_open:
+ self.transport = self._transport_cls(
+ self.client, self.name, resource=self.resource
+ )
+ self._transport_open = True
+
self.transport.send(
record,
message,
@@ -217,6 +234,22 @@ def emit(self, record):
source_location=record._source_location,
)
+ def flush(self):
+ """Forces the Transport object to submit any pending log records.
+
+ For SyncTransport, this is a no-op.
+ """
+ super(CloudLoggingHandler, self).flush()
+ if self._transport_open:
+ self.transport.flush()
+
+ def close(self):
+ """Closes the log handler and cleans up all Transport objects used."""
+ if self._transport_open:
+ self.transport.close()
+ self.transport = None
+ self._transport_open = False
+
def _format_and_parse_message(record, formatter_handler):
"""
@@ -288,10 +321,11 @@ def setup_logging(
excluded_loggers (Optional[Tuple[str]]): The loggers to not attach the handler
to. This will always include the loggers in the
path of the logging client itself.
- log_level (Optional[int]): Python logging log level. Defaults to
+ log_level (Optional[int]): The logging level threshold of the attached logger,
+ as set by the :meth:`logging.Logger.setLevel` method. Defaults to
:const:`logging.INFO`.
"""
- all_excluded_loggers = set(excluded_loggers + EXCLUDED_LOGGER_DEFAULTS)
+ all_excluded_loggers = set(excluded_loggers + _INTERNAL_LOGGERS)
logger = logging.getLogger()
# remove built-in handlers on App Engine or Cloud Functions environments
diff --git a/google/cloud/logging_v2/handlers/structured_log.py b/google/cloud/logging_v2/handlers/structured_log.py
index fac9b26b3..dcba02c9c 100644
--- a/google/cloud/logging_v2/handlers/structured_log.py
+++ b/google/cloud/logging_v2/handlers/structured_log.py
@@ -63,7 +63,13 @@ class StructuredLogHandler(logging.StreamHandler):
"""
def __init__(
- self, *, labels=None, stream=None, project_id=None, json_encoder_cls=None
+ self,
+ *,
+ labels=None,
+ stream=None,
+ project_id=None,
+ json_encoder_cls=None,
+ **kwargs
):
"""
Args:
@@ -79,8 +85,18 @@ def __init__(
log_filter = CloudLoggingFilter(project=project_id, default_labels=labels)
self.addFilter(log_filter)
+ class _Formatter(logging.Formatter):
+ """Formatter to format log message without traceback"""
+
+ def format(self, record):
+ """Ignore exception info to avoid duplicating it
+ https://github.com/googleapis/python-logging/issues/382
+ """
+ record.message = record.getMessage()
+ return self.formatMessage(record)
+
# make logs appear in GCP structured logging format
- self._gcp_formatter = logging.Formatter(GCP_FORMAT)
+ self._gcp_formatter = _Formatter(GCP_FORMAT)
self._json_encoder_cls = json_encoder_cls or json.JSONEncoder
@@ -115,11 +131,7 @@ def format(self, record):
payload = '"message": {},'.format(encoded_message)
record._payload_str = payload or ""
- # remove exception info to avoid duplicating it
- # https://github.com/googleapis/python-logging/issues/382
- record.exc_info = None
- record.exc_text = None
- # convert to GCP structred logging format
+ # convert to GCP structured logging format
gcp_payload = self._gcp_formatter.format(record)
return gcp_payload
diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py b/google/cloud/logging_v2/handlers/transports/background_thread.py
index f361e043c..021112fdb 100644
--- a/google/cloud/logging_v2/handlers/transports/background_thread.py
+++ b/google/cloud/logging_v2/handlers/transports/background_thread.py
@@ -38,6 +38,13 @@
_WORKER_TERMINATOR = object()
_LOGGER = logging.getLogger(__name__)
+_CLOSE_THREAD_SHUTDOWN_ERROR_MSG = (
+ "CloudLoggingHandler shutting down, cannot send logs entries to Cloud Logging due to "
+ "inconsistent threading behavior at shutdown. To avoid this issue, flush the logging handler "
+ "manually or switch to StructuredLogHandler. You can also close the CloudLoggingHandler manually "
+ "via handler.close or client.close."
+)
+
def _get_many(queue_, *, max_items=None, max_latency=0):
"""Get multiple items from a Queue.
@@ -140,9 +147,11 @@ def _thread_main(self):
else:
batch.log(**item)
- self._safely_commit_batch(batch)
+ # We cannot commit logs upstream if the main thread is shutting down
+ if threading.main_thread().is_alive():
+ self._safely_commit_batch(batch)
- for _ in items:
+ for it in items:
self._queue.task_done()
_LOGGER.debug("Background thread exited gracefully.")
@@ -162,7 +171,7 @@ def start(self):
)
self._thread.daemon = True
self._thread.start()
- atexit.register(self._main_thread_terminated)
+ atexit.register(self._handle_exit)
def stop(self, *, grace_period=None):
"""Signals the background thread to stop.
@@ -202,26 +211,26 @@ def stop(self, *, grace_period=None):
return success
- def _main_thread_terminated(self):
- """Callback that attempts to send pending logs before termination."""
+ def _close(self, close_msg):
+ """Callback that attempts to send pending logs before termination if the main thread is alive."""
if not self.is_alive:
return
if not self._queue.empty():
- print(
- "Program shutting down, attempting to send %d queued log "
- "entries to Cloud Logging..." % (self._queue.qsize(),),
- file=sys.stderr,
- )
+ print(close_msg, file=sys.stderr)
- if self.stop(grace_period=self._grace_period):
+ if threading.main_thread().is_alive() and self.stop(
+ grace_period=self._grace_period
+ ):
print("Sent all pending logs.", file=sys.stderr)
- else:
+ elif not self._queue.empty():
print(
"Failed to send %d pending logs." % (self._queue.qsize(),),
file=sys.stderr,
)
+ self._thread = None
+
def enqueue(self, record, message, **kwargs):
"""Queues a log entry to be written by the background thread.
@@ -240,7 +249,9 @@ def enqueue(self, record, message, **kwargs):
queue_entry = {
"message": message,
"severity": _helpers._normalize_severity(record.levelno),
- "timestamp": datetime.datetime.utcfromtimestamp(record.created),
+ "timestamp": datetime.datetime.fromtimestamp(
+ record.created, datetime.timezone.utc
+ ),
}
queue_entry.update(kwargs)
self._queue.put_nowait(queue_entry)
@@ -249,6 +260,26 @@ def flush(self):
"""Submit any pending log records."""
self._queue.join()
+ def close(self):
+ """Signals the worker thread to stop, then closes the transport thread.
+
+ This call will attempt to send pending logs before termination, and
+ should be followed up by disowning the transport object.
+ """
+ atexit.unregister(self._handle_exit)
+ self._close(
+ "Background thread shutting down, attempting to send %d queued log "
+ "entries to Cloud Logging..." % (self._queue.qsize(),)
+ )
+
+ def _handle_exit(self):
+ """Handle system exit.
+
+ Since we cannot send pending logs during system shutdown due to thread errors,
+ log an error message to stderr to notify the user.
+ """
+ self._close(_CLOSE_THREAD_SHUTDOWN_ERROR_MSG)
+
class BackgroundThreadTransport(Transport):
"""Asynchronous transport that uses a background thread."""
@@ -283,6 +314,7 @@ def __init__(
"""
self.client = client
logger = self.client.logger(name, resource=resource)
+ self.grace_period = grace_period
self.worker = _Worker(
logger,
grace_period=grace_period,
@@ -305,3 +337,7 @@ def send(self, record, message, **kwargs):
def flush(self):
"""Submit any pending log records."""
self.worker.flush()
+
+ def close(self):
+ """Closes the worker thread."""
+ self.worker.close()
diff --git a/google/cloud/logging_v2/handlers/transports/base.py b/google/cloud/logging_v2/handlers/transports/base.py
index a0c9aafa4..31e8f418a 100644
--- a/google/cloud/logging_v2/handlers/transports/base.py
+++ b/google/cloud/logging_v2/handlers/transports/base.py
@@ -51,3 +51,11 @@ def flush(self):
For blocking/sync transports, this is a no-op.
"""
+ pass
+
+ def close(self):
+ """Closes the transport and cleans up resources used by it.
+
+ This call should be followed up by disowning the transport.
+ """
+ pass
diff --git a/google/cloud/logging_v2/handlers/transports/sync.py b/google/cloud/logging_v2/handlers/transports/sync.py
index 6f93b2e57..6bf91f8da 100644
--- a/google/cloud/logging_v2/handlers/transports/sync.py
+++ b/google/cloud/logging_v2/handlers/transports/sync.py
@@ -14,7 +14,7 @@
"""Transport for Python logging handler.
-Logs directly to the the Cloud Logging API with a synchronous call.
+Logs directly to the Cloud Logging API with a synchronous call.
"""
from google.cloud.logging_v2 import _helpers
from google.cloud.logging_v2.handlers.transports.base import Transport
@@ -59,3 +59,10 @@ def send(self, record, message, **kwargs):
labels=labels,
**kwargs,
)
+
+ def close(self):
+ """Closes the transport and cleans up resources used by it.
+
+ This call is usually followed up by cleaning up the reference to the transport.
+ """
+ self.logger = None
diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py
index 88424b27c..eaa8d2d36 100644
--- a/google/cloud/logging_v2/logger.py
+++ b/google/cloud/logging_v2/logger.py
@@ -29,6 +29,7 @@
from google.api_core.exceptions import InvalidArgument
from google.rpc.error_details_pb2 import DebugInfo
+import google.cloud.logging_v2
import google.protobuf.message
_GLOBAL_RESOURCE = Resource(type="global", labels={})
@@ -161,6 +162,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw):
api_repr = entry.to_api_repr()
entries = [api_repr]
+
if google.cloud.logging_v2._instrumentation_emitted is False:
entries = _add_instrumentation(entries, **kw)
google.cloud.logging_v2._instrumentation_emitted = True
@@ -199,18 +201,38 @@ def log_text(self, text, *, client=None, **kw):
self._do_log(client, TextEntry, text, **kw)
def log_struct(self, info, *, client=None, **kw):
- """Log a dictionary message
+ """Logs a dictionary message.
See
https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write
+ The message must be able to be serializable to a Protobuf Struct.
+ It must be a dictionary of strings to one of the following:
+
+ - :class:`str`
+ - :class:`int`
+ - :class:`float`
+ - :class:`bool`
+ - :class:`list[str|float|int|bool|list|dict|None]`
+ - :class:`dict[str, str|float|int|bool|list|dict|None]`
+
+ For more details on Protobuf structs, see https://protobuf.dev/reference/protobuf/google.protobuf/#value.
+ If the provided dictionary cannot be serialized into a Protobuf struct,
+ it will not be logged, and a :class:`ValueError` will be raised.
+
Args:
- info (dict): the log entry information
+ info (dict[str, str|float|int|bool|list|dict|None]):
+ the log entry information.
client (Optional[~logging_v2.client.Client]):
The client to use. If not passed, falls back to the
``client`` stored on the current sink.
kw (Optional[dict]): additional keyword arguments for the entry.
See :class:`~logging_v2.entries.LogEntry`.
+
+ Raises:
+ ValueError:
+ if the dictionary message provided cannot be serialized into a Protobuf
+ struct.
"""
for field in _STRUCT_EXTRACTABLE_FIELDS:
# attempt to copy relevant fields from the payload into the LogEntry body
@@ -359,7 +381,7 @@ def __init__(self, logger, client, *, resource=None):
Args:
logger (logging_v2.logger.Logger):
the logger to which entries will be logged.
- client (~logging_V2.client.Cilent):
+ client (~logging_V2.client.Client):
The client to use.
resource (Optional[~logging_v2.resource.Resource]):
Monitored resource of the batch, defaults
@@ -404,8 +426,22 @@ def log_text(self, text, **kw):
def log_struct(self, info, **kw):
"""Add a struct entry to be logged during :meth:`commit`.
+ The message must be able to be serializable to a Protobuf Struct.
+ It must be a dictionary of strings to one of the following:
+
+ - :class:`str`
+ - :class:`int`
+ - :class:`float`
+ - :class:`bool`
+ - :class:`list[str|float|int|bool|list|dict|None]`
+ - :class:`dict[str, str|float|int|bool|list|dict|None]`
+
+ For more details on Protobuf structs, see https://protobuf.dev/reference/protobuf/google.protobuf/#value.
+ If the provided dictionary cannot be serialized into a Protobuf struct,
+ it will not be logged, and a :class:`ValueError` will be raised during :meth:`commit`.
+
Args:
- info (dict): The struct entry,
+ info (dict[str, str|float|int|bool|list|dict|None]): The struct entry,
kw (Optional[dict]): Additional keyword arguments for the entry.
See :class:`~logging_v2.entries.LogEntry`.
"""
@@ -450,6 +486,10 @@ def commit(self, *, client=None, partial_success=True):
Whether a batch's valid entries should be written even
if some other entry failed due to a permanent error such
as INVALID_ARGUMENT or PERMISSION_DENIED.
+
+ Raises:
+ ValueError:
+ if one of the messages in the batch cannot be successfully parsed.
"""
if client is None:
client = self.client
diff --git a/google/cloud/logging_v2/services/__init__.py b/google/cloud/logging_v2/services/__init__.py
index 89a37dc92..cbf94b283 100644
--- a/google/cloud/logging_v2/services/__init__.py
+++ b/google/cloud/logging_v2/services/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/config_service_v2/__init__.py b/google/cloud/logging_v2/services/config_service_v2/__init__.py
index bf3043949..187d00d52 100644
--- a/google/cloud/logging_v2/services/config_service_v2/__init__.py
+++ b/google/cloud/logging_v2/services/config_service_v2/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py
index e066569f7..ad681a9c7 100644
--- a/google/cloud/logging_v2/services/config_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,11 +13,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging as std_logging
from collections import OrderedDict
-import functools
import re
from typing import (
Dict,
+ Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -33,14 +34,16 @@
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
-from google.api_core import retry as retries
+from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
+
try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+ OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
+ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
@@ -54,14 +57,27 @@
from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport
from .client import ConfigServiceV2Client
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
class ConfigServiceV2AsyncClient:
"""Service for configuring sinks used to route log entries."""
_client: ConfigServiceV2Client
+ # Copy defaults from the synchronous client for use here.
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ _DEFAULT_ENDPOINT_TEMPLATE = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE
+ _DEFAULT_UNIVERSE = ConfigServiceV2Client._DEFAULT_UNIVERSE
cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path)
parse_cmek_settings_path = staticmethod(
@@ -184,19 +200,40 @@ def transport(self) -> ConfigServiceV2Transport:
"""
return self._client.transport
- get_transport_class = functools.partial(
- type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)
- )
+ @property
+ def api_endpoint(self):
+ """Return the API endpoint used by the client instance.
+
+ Returns:
+ str: The API endpoint used by the client instance.
+ """
+ return self._client._api_endpoint
+
+ @property
+ def universe_domain(self) -> str:
+ """Return the universe domain used by the client instance.
+
+ Returns:
+ str: The universe domain used
+ by the client instance.
+ """
+ return self._client._universe_domain
+
+ get_transport_class = ConfigServiceV2Client.get_transport_class
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio",
+ transport: Optional[
+ Union[
+ str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]
+ ]
+ ] = "grpc_asyncio",
client_options: Optional[ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiates the config service v2 client.
+ """Instantiates the config service v2 async client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -204,26 +241,43 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.ConfigServiceV2Transport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
+ transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]):
+ The transport to use, or a Callable that constructs and returns a new transport to use.
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the ConfigServiceV2Transport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which has one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
+ to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
@@ -235,6 +289,28 @@ def __init__(
client_info=client_info,
)
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.",
+ extra={
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "universeDomain": getattr(
+ self._client._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._client._transport, "_credentials")
+ else {
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "credentialsType": None,
+ },
+ )
+
async def list_buckets(
self,
request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None,
@@ -242,7 +318,7 @@ async def list_buckets(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListBucketsAsyncPager:
r"""Lists log buckets.
@@ -294,11 +370,13 @@ async def sample_list_buckets():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager:
@@ -310,16 +388,22 @@ async def sample_list_buckets():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.ListBucketsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.ListBucketsRequest):
+ request = logging_config.ListBucketsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -328,11 +412,9 @@ async def sample_list_buckets():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_buckets,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_buckets
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -340,6 +422,9 @@ async def sample_list_buckets():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -354,6 +439,8 @@ async def sample_list_buckets():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -366,7 +453,7 @@ async def get_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogBucket:
r"""Gets a log bucket.
@@ -399,11 +486,13 @@ async def sample_get_bucket():
Args:
request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]):
The request object. The parameters to ``GetBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogBucket:
@@ -412,15 +501,16 @@ async def sample_get_bucket():
"""
# Create or coerce a protobuf request object.
- request = logging_config.GetBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetBucketRequest):
+ request = logging_config.GetBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_bucket,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.get_bucket
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -428,6 +518,9 @@ async def sample_get_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -445,7 +538,7 @@ async def create_bucket_async(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation_async.AsyncOperation:
r"""Creates a log bucket asynchronously that can be used
to store log entries.
@@ -486,11 +579,13 @@ async def sample_create_bucket_async():
Args:
request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]):
The request object. The parameters to ``CreateBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation_async.AsyncOperation:
@@ -502,15 +597,16 @@ async def sample_create_bucket_async():
"""
# Create or coerce a protobuf request object.
- request = logging_config.CreateBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CreateBucketRequest):
+ request = logging_config.CreateBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_bucket_async,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_bucket_async
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -518,6 +614,9 @@ async def sample_create_bucket_async():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -543,7 +642,7 @@ async def update_bucket_async(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation_async.AsyncOperation:
r"""Updates a log bucket asynchronously.
@@ -586,11 +685,13 @@ async def sample_update_bucket_async():
Args:
request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]):
The request object. The parameters to ``UpdateBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation_async.AsyncOperation:
@@ -602,15 +703,16 @@ async def sample_update_bucket_async():
"""
# Create or coerce a protobuf request object.
- request = logging_config.UpdateBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateBucketRequest):
+ request = logging_config.UpdateBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_bucket_async,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_bucket_async
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -618,6 +720,9 @@ async def sample_update_bucket_async():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -643,7 +748,7 @@ async def create_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogBucket:
r"""Creates a log bucket that can be used to store log
entries. After a bucket has been created, the bucket's
@@ -679,11 +784,13 @@ async def sample_create_bucket():
Args:
request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]):
The request object. The parameters to ``CreateBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogBucket:
@@ -692,15 +799,16 @@ async def sample_create_bucket():
"""
# Create or coerce a protobuf request object.
- request = logging_config.CreateBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CreateBucketRequest):
+ request = logging_config.CreateBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_bucket,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_bucket
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -708,6 +816,9 @@ async def sample_create_bucket():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -725,7 +836,7 @@ async def update_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogBucket:
r"""Updates a log bucket.
@@ -764,11 +875,13 @@ async def sample_update_bucket():
Args:
request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]):
The request object. The parameters to ``UpdateBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogBucket:
@@ -777,15 +890,16 @@ async def sample_update_bucket():
"""
# Create or coerce a protobuf request object.
- request = logging_config.UpdateBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateBucketRequest):
+ request = logging_config.UpdateBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_bucket,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_bucket
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -793,6 +907,9 @@ async def sample_update_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -810,7 +927,7 @@ async def delete_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a log bucket.
@@ -845,22 +962,25 @@ async def sample_delete_bucket():
Args:
request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]):
The request object. The parameters to ``DeleteBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- request = logging_config.DeleteBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.DeleteBucketRequest):
+ request = logging_config.DeleteBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_bucket,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_bucket
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -868,6 +988,9 @@ async def sample_delete_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -882,7 +1005,7 @@ async def undelete_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Undeletes a log bucket. A bucket that has been
deleted can be undeleted within the grace period of 7
@@ -914,22 +1037,25 @@ async def sample_undelete_bucket():
Args:
request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]):
The request object. The parameters to ``UndeleteBucket``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- request = logging_config.UndeleteBucketRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UndeleteBucketRequest):
+ request = logging_config.UndeleteBucketRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.undelete_bucket,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.undelete_bucket
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -937,6 +1063,9 @@ async def sample_undelete_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -952,7 +1081,7 @@ async def list_views(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListViewsAsyncPager:
r"""Lists views on a log bucket.
@@ -996,11 +1125,13 @@ async def sample_list_views():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager:
@@ -1012,16 +1143,22 @@ async def sample_list_views():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.ListViewsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.ListViewsRequest):
+ request = logging_config.ListViewsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -1030,11 +1167,9 @@ async def sample_list_views():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_views,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_views
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1042,6 +1177,9 @@ async def sample_list_views():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1056,6 +1194,8 @@ async def sample_list_views():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -1068,7 +1208,7 @@ async def get_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogView:
r"""Gets a view on a log bucket..
@@ -1101,11 +1241,13 @@ async def sample_get_view():
Args:
request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]):
The request object. The parameters to ``GetView``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogView:
@@ -1114,15 +1256,14 @@ async def sample_get_view():
"""
# Create or coerce a protobuf request object.
- request = logging_config.GetViewRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetViewRequest):
+ request = logging_config.GetViewRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_view,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_view]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1130,6 +1271,9 @@ async def sample_get_view():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1147,7 +1291,7 @@ async def create_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogView:
r"""Creates a view over log entries in a log bucket. A
bucket may contain a maximum of 30 views.
@@ -1182,11 +1326,13 @@ async def sample_create_view():
Args:
request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]):
The request object. The parameters to ``CreateView``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogView:
@@ -1195,15 +1341,16 @@ async def sample_create_view():
"""
# Create or coerce a protobuf request object.
- request = logging_config.CreateViewRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CreateViewRequest):
+ request = logging_config.CreateViewRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_view,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_view
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1211,6 +1358,9 @@ async def sample_create_view():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1228,7 +1378,7 @@ async def update_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogView:
r"""Updates a view on a log bucket. This method replaces the
following fields in the existing view with values from the new
@@ -1265,11 +1415,13 @@ async def sample_update_view():
Args:
request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]):
The request object. The parameters to ``UpdateView``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogView:
@@ -1278,15 +1430,16 @@ async def sample_update_view():
"""
# Create or coerce a protobuf request object.
- request = logging_config.UpdateViewRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateViewRequest):
+ request = logging_config.UpdateViewRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_view,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_view
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1294,6 +1447,9 @@ async def sample_update_view():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1311,7 +1467,7 @@ async def delete_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is
returned, this indicates that system is not in a state where it
@@ -1344,22 +1500,25 @@ async def sample_delete_view():
Args:
request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]):
The request object. The parameters to ``DeleteView``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- request = logging_config.DeleteViewRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.DeleteViewRequest):
+ request = logging_config.DeleteViewRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_view,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_view
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1367,6 +1526,9 @@ async def sample_delete_view():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -1382,7 +1544,7 @@ async def list_sinks(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListSinksAsyncPager:
r"""Lists sinks.
@@ -1430,11 +1592,13 @@ async def sample_list_sinks():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager:
@@ -1445,16 +1609,22 @@ async def sample_list_sinks():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.ListSinksRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.ListSinksRequest):
+ request = logging_config.ListSinksRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -1463,22 +1633,9 @@ async def sample_list_sinks():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_sinks,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_sinks
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1486,6 +1643,9 @@ async def sample_list_sinks():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1500,6 +1660,8 @@ async def sample_list_sinks():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -1513,7 +1675,7 @@ async def get_sink(
sink_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogSink:
r"""Gets a sink.
@@ -1563,11 +1725,13 @@ async def sample_get_sink():
This corresponds to the ``sink_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogSink:
@@ -1583,16 +1747,22 @@ async def sample_get_sink():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([sink_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [sink_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.GetSinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetSinkRequest):
+ request = logging_config.GetSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -1601,22 +1771,7 @@ async def sample_get_sink():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_sink,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_sink]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1626,6 +1781,9 @@ async def sample_get_sink():
),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1645,7 +1803,7 @@ async def create_sink(
sink: Optional[logging_config.LogSink] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogSink:
r"""Creates a sink that exports specified log entries to a
destination. The export of newly-ingested log entries begins
@@ -1711,11 +1869,13 @@ async def sample_create_sink():
This corresponds to the ``sink`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogSink:
@@ -1731,16 +1891,22 @@ async def sample_create_sink():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, sink])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, sink]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.CreateSinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CreateSinkRequest):
+ request = logging_config.CreateSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -1751,11 +1917,9 @@ async def sample_create_sink():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_sink,
- default_timeout=120.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_sink
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1763,6 +1927,9 @@ async def sample_create_sink():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1783,7 +1950,7 @@ async def update_sink(
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogSink:
r"""Updates a sink. This method replaces the following fields in the
existing sink with values from the new sink: ``destination``,
@@ -1873,11 +2040,13 @@ async def sample_update_sink():
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogSink:
@@ -1893,16 +2062,22 @@ async def sample_update_sink():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([sink_name, sink, update_mask])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [sink_name, sink, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.UpdateSinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateSinkRequest):
+ request = logging_config.UpdateSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -1915,22 +2090,9 @@ async def sample_update_sink():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_sink,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_sink
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1940,6 +2102,9 @@ async def sample_update_sink():
),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1958,7 +2123,7 @@ async def delete_sink(
sink_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a sink. If the sink has a unique ``writer_identity``,
then that service account is also deleted.
@@ -2007,23 +2172,31 @@ async def sample_delete_sink():
This corresponds to the ``sink_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([sink_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [sink_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.DeleteSinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.DeleteSinkRequest):
+ request = logging_config.DeleteSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2032,22 +2205,9 @@ async def sample_delete_sink():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_sink,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_sink
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2057,6 +2217,9 @@ async def sample_delete_sink():
),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -2074,7 +2237,7 @@ async def create_link(
link_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation_async.AsyncOperation:
r"""Asynchronously creates a linked dataset in BigQuery
which makes it possible to use BigQuery to read the logs
@@ -2142,11 +2305,13 @@ async def sample_create_link():
This corresponds to the ``link_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation_async.AsyncOperation:
@@ -2158,16 +2323,22 @@ async def sample_create_link():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, link, link_id])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, link, link_id]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.CreateLinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CreateLinkRequest):
+ request = logging_config.CreateLinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2180,11 +2351,9 @@ async def sample_create_link():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_link,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_link
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2192,6 +2361,9 @@ async def sample_create_link():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2218,7 +2390,7 @@ async def delete_link(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes a link. This will also delete the
corresponding BigQuery linked dataset.
@@ -2267,11 +2439,13 @@ async def sample_delete_link():
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation_async.AsyncOperation:
@@ -2290,16 +2464,22 @@ async def sample_delete_link():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.DeleteLinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.DeleteLinkRequest):
+ request = logging_config.DeleteLinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2308,11 +2488,9 @@ async def sample_delete_link():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_link,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_link
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2320,6 +2498,9 @@ async def sample_delete_link():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2346,7 +2527,7 @@ async def list_links(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLinksAsyncPager:
r"""Lists links.
@@ -2392,11 +2573,13 @@ async def sample_list_links():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager:
@@ -2408,16 +2591,22 @@ async def sample_list_links():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.ListLinksRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.ListLinksRequest):
+ request = logging_config.ListLinksRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2426,11 +2615,9 @@ async def sample_list_links():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_links,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_links
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2438,6 +2625,9 @@ async def sample_list_links():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2452,6 +2642,8 @@ async def sample_list_links():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -2465,7 +2657,7 @@ async def get_link(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.Link:
r"""Gets a link.
@@ -2509,11 +2701,13 @@ async def sample_get_link():
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.Link:
@@ -2522,16 +2716,22 @@ async def sample_get_link():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.GetLinkRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetLinkRequest):
+ request = logging_config.GetLinkRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2540,11 +2740,7 @@ async def sample_get_link():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_link,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[self._client._transport.get_link]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2552,6 +2748,9 @@ async def sample_get_link():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2570,7 +2769,7 @@ async def list_exclusions(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListExclusionsAsyncPager:
r"""Lists all the exclusions on the \_Default sink in a parent
resource.
@@ -2619,11 +2818,13 @@ async def sample_list_exclusions():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager:
@@ -2634,16 +2835,22 @@ async def sample_list_exclusions():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.ListExclusionsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.ListExclusionsRequest):
+ request = logging_config.ListExclusionsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2652,22 +2859,9 @@ async def sample_list_exclusions():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_exclusions,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_exclusions
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2675,6 +2869,9 @@ async def sample_list_exclusions():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2689,6 +2886,8 @@ async def sample_list_exclusions():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -2702,7 +2901,7 @@ async def get_exclusion(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogExclusion:
r"""Gets the description of an exclusion in the \_Default sink.
@@ -2752,11 +2951,13 @@ async def sample_get_exclusion():
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogExclusion:
@@ -2770,16 +2971,22 @@ async def sample_get_exclusion():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.GetExclusionRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetExclusionRequest):
+ request = logging_config.GetExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2788,22 +2995,9 @@ async def sample_get_exclusion():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_exclusion,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.get_exclusion
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2811,6 +3005,9 @@ async def sample_get_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2830,7 +3027,7 @@ async def create_exclusion(
exclusion: Optional[logging_config.LogExclusion] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogExclusion:
r"""Creates a new exclusion in the \_Default sink in a specified
parent resource. Only log entries belonging to that resource can
@@ -2897,11 +3094,13 @@ async def sample_create_exclusion():
This corresponds to the ``exclusion`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogExclusion:
@@ -2915,16 +3114,22 @@ async def sample_create_exclusion():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, exclusion])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, exclusion]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.CreateExclusionRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CreateExclusionRequest):
+ request = logging_config.CreateExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -2935,11 +3140,9 @@ async def sample_create_exclusion():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_exclusion,
- default_timeout=120.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_exclusion
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -2947,6 +3150,9 @@ async def sample_create_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -2967,7 +3173,7 @@ async def update_exclusion(
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogExclusion:
r"""Changes one or more properties of an existing exclusion in the
\_Default sink.
@@ -3045,11 +3251,13 @@ async def sample_update_exclusion():
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogExclusion:
@@ -3063,16 +3271,22 @@ async def sample_update_exclusion():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name, exclusion, update_mask])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name, exclusion, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.UpdateExclusionRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateExclusionRequest):
+ request = logging_config.UpdateExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -3085,11 +3299,9 @@ async def sample_update_exclusion():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_exclusion,
- default_timeout=120.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_exclusion
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3097,6 +3309,9 @@ async def sample_update_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3115,7 +3330,7 @@ async def delete_exclusion(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes an exclusion in the \_Default sink.
@@ -3163,23 +3378,31 @@ async def sample_delete_exclusion():
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.DeleteExclusionRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.DeleteExclusionRequest):
+ request = logging_config.DeleteExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -3188,22 +3411,9 @@ async def sample_delete_exclusion():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_exclusion,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_exclusion
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3211,6 +3421,9 @@ async def sample_delete_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -3225,7 +3438,7 @@ async def get_cmek_settings(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.CmekSettings:
r"""Gets the Logging CMEK settings for the given resource.
@@ -3272,11 +3485,13 @@ async def sample_get_cmek_settings():
See `Enabling CMEK for Log
Router `__
for more information.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.CmekSettings:
@@ -3295,15 +3510,16 @@ async def sample_get_cmek_settings():
"""
# Create or coerce a protobuf request object.
- request = logging_config.GetCmekSettingsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetCmekSettingsRequest):
+ request = logging_config.GetCmekSettingsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_cmek_settings,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.get_cmek_settings
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3311,6 +3527,9 @@ async def sample_get_cmek_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3328,7 +3547,7 @@ async def update_cmek_settings(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.CmekSettings:
r"""Updates the Log Router CMEK settings for the given resource.
@@ -3380,11 +3599,13 @@ async def sample_update_cmek_settings():
See `Enabling CMEK for Log
Router `__
for more information.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.CmekSettings:
@@ -3403,15 +3624,16 @@ async def sample_update_cmek_settings():
"""
# Create or coerce a protobuf request object.
- request = logging_config.UpdateCmekSettingsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateCmekSettingsRequest):
+ request = logging_config.UpdateCmekSettingsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_cmek_settings,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_cmek_settings
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3419,6 +3641,9 @@ async def sample_update_cmek_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3437,7 +3662,7 @@ async def get_settings(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.Settings:
r"""Gets the Log Router settings for the given resource.
@@ -3509,11 +3734,13 @@ async def sample_get_settings():
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.Settings:
@@ -3523,16 +3750,22 @@ async def sample_get_settings():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.GetSettingsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.GetSettingsRequest):
+ request = logging_config.GetSettingsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -3541,11 +3774,9 @@ async def sample_get_settings():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_settings,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.get_settings
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3553,6 +3784,9 @@ async def sample_get_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3572,7 +3806,7 @@ async def update_settings(
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.Settings:
r"""Updates the Log Router settings for the given resource.
@@ -3651,11 +3885,13 @@ async def sample_update_settings():
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.Settings:
@@ -3665,16 +3901,22 @@ async def sample_update_settings():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([settings, update_mask])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [settings, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_config.UpdateSettingsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.UpdateSettingsRequest):
+ request = logging_config.UpdateSettingsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -3685,11 +3927,9 @@ async def sample_update_settings():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_settings,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_settings
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3697,6 +3937,9 @@ async def sample_update_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3714,7 +3957,7 @@ async def copy_log_entries(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation_async.AsyncOperation:
r"""Copies a set of log entries from a log bucket to a
Cloud Storage bucket.
@@ -3753,11 +3996,13 @@ async def sample_copy_log_entries():
Args:
request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]):
The request object. The parameters to CopyLogEntries.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation_async.AsyncOperation:
@@ -3770,15 +4015,19 @@ async def sample_copy_log_entries():
"""
# Create or coerce a protobuf request object.
- request = logging_config.CopyLogEntriesRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_config.CopyLogEntriesRequest):
+ request = logging_config.CopyLogEntriesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.copy_log_entries,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.copy_log_entries
+ ]
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
# Send the request.
response = await rpc(
@@ -3805,7 +4054,7 @@ async def list_operations(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
@@ -3813,11 +4062,13 @@ async def list_operations(
request (:class:`~.operations_pb2.ListOperationsRequest`):
The request object. Request message for
`ListOperations` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
@@ -3830,11 +4081,7 @@ async def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3842,6 +4089,9 @@ async def list_operations(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3859,7 +4109,7 @@ async def get_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -3867,11 +4117,13 @@ async def get_operation(
request (:class:`~.operations_pb2.GetOperationRequest`):
The request object. Request message for
`GetOperation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
@@ -3884,11 +4136,7 @@ async def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3896,6 +4144,9 @@ async def get_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -3913,7 +4164,7 @@ async def cancel_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -3925,11 +4176,13 @@ async def cancel_operation(
request (:class:`~.operations_pb2.CancelOperationRequest`):
The request object. Request message for
`CancelOperation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
@@ -3941,11 +4194,7 @@ async def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -3953,6 +4202,9 @@ async def cancel_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -3972,5 +4224,8 @@ async def __aexit__(self, exc_type, exc, tb):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
__all__ = ("ConfigServiceV2AsyncClient",)
diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py
index 5208fe442..6c97c6556 100644
--- a/google/cloud/logging_v2/services/config_service_v2/client.py
+++ b/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,10 +14,14 @@
# limitations under the License.
#
from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
import os
import re
from typing import (
Dict,
+ Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -28,6 +32,7 @@
Union,
cast,
)
+import warnings
from google.cloud.logging_v2 import gapic_version as package_version
@@ -40,11 +45,21 @@
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
@@ -128,11 +143,15 @@ def _get_default_mtls_endpoint(api_endpoint):
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = "logging.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
+ _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}"
+ _DEFAULT_UNIVERSE = "googleapis.com"
+
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -395,7 +414,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
- """Return the API endpoint and client cert source for mutual TLS.
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
@@ -425,6 +444,11 @@ def get_mtls_endpoint_and_cert_source(
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
+
+ warnings.warn(
+ "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+ DeprecationWarning,
+ )
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
@@ -458,11 +482,180 @@ def get_mtls_endpoint_and_cert_source(
return api_endpoint, client_cert_source
+ @staticmethod
+ def _read_environment_variables():
+ """Returns the environment variables used by the client.
+
+ Returns:
+ Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+ GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+ Raises:
+ ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+ any of ["true", "false"].
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+ is not any of ["auto", "never", "always"].
+ """
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+ universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+ return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+ @staticmethod
+ def _get_client_cert_source(provided_cert_source, use_cert_flag):
+ """Return the client cert source to be used by the client.
+
+ Args:
+ provided_cert_source (bytes): The client certificate source provided.
+ use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+ Returns:
+ bytes or None: The client cert source to be used by the client.
+ """
+ client_cert_source = None
+ if use_cert_flag:
+ if provided_cert_source:
+ client_cert_source = provided_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+ return client_cert_source
+
+ @staticmethod
+ def _get_api_endpoint(
+ api_override, client_cert_source, universe_domain, use_mtls_endpoint
+ ):
+ """Return the API endpoint used by the client.
+
+ Args:
+ api_override (str): The API endpoint override. If specified, this is always
+ the return value of this function and the other arguments are not used.
+ client_cert_source (bytes): The client certificate source used by the client.
+ universe_domain (str): The universe domain used by the client.
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+ Possible values are "always", "auto", or "never".
+
+ Returns:
+ str: The API endpoint to be used by the client.
+ """
+ if api_override is not None:
+ api_endpoint = api_override
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ _default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE
+ if universe_domain != _default_universe:
+ raise MutualTLSChannelError(
+ f"mTLS is not supported in any universe other than {_default_universe}."
+ )
+ api_endpoint = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=universe_domain
+ )
+ return api_endpoint
+
+ @staticmethod
+ def _get_universe_domain(
+ client_universe_domain: Optional[str], universe_domain_env: Optional[str]
+ ) -> str:
+ """Return the universe domain used by the client.
+
+ Args:
+ client_universe_domain (Optional[str]): The universe domain configured via the client options.
+ universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+ Returns:
+ str: The universe domain to be used by the client.
+
+ Raises:
+ ValueError: If the universe domain is an empty string.
+ """
+ universe_domain = ConfigServiceV2Client._DEFAULT_UNIVERSE
+ if client_universe_domain is not None:
+ universe_domain = client_universe_domain
+ elif universe_domain_env is not None:
+ universe_domain = universe_domain_env
+ if len(universe_domain.strip()) == 0:
+ raise ValueError("Universe Domain cannot be an empty string.")
+ return universe_domain
+
+ def _validate_universe_domain(self):
+ """Validates client's and credentials' universe domains are consistent.
+
+ Returns:
+ bool: True iff the configured universe domain is valid.
+
+ Raises:
+ ValueError: If the configured universe domain is not valid.
+ """
+
+ # NOTE (b/349488459): universe validation is disabled until further notice.
+ return True
+
+ def _add_cred_info_for_auth_errors(
+ self, error: core_exceptions.GoogleAPICallError
+ ) -> None:
+ """Adds credential info string to error details for 401/403/404 errors.
+
+ Args:
+ error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
+ """
+ if error.code not in [
+ HTTPStatus.UNAUTHORIZED,
+ HTTPStatus.FORBIDDEN,
+ HTTPStatus.NOT_FOUND,
+ ]:
+ return
+
+ cred = self._transport._credentials
+
+ # get_cred_info is only available in google-auth>=2.35.0
+ if not hasattr(cred, "get_cred_info"):
+ return
+
+ # ignore the type check since pypy test fails when get_cred_info
+ # is not available
+ cred_info = cred.get_cred_info() # type: ignore
+ if cred_info and hasattr(error._details, "append"):
+ error._details.append(json.dumps(cred_info))
+
+ @property
+ def api_endpoint(self):
+ """Return the API endpoint used by the client instance.
+
+ Returns:
+ str: The API endpoint used by the client instance.
+ """
+ return self._api_endpoint
+
+ @property
+ def universe_domain(self) -> str:
+ """Return the universe domain used by the client instance.
+
+ Returns:
+ str: The universe domain used by the client instance.
+ """
+ return self._universe_domain
+
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
- transport: Optional[Union[str, ConfigServiceV2Transport]] = None,
+ transport: Optional[
+ Union[
+ str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]
+ ]
+ ] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -474,25 +667,37 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ConfigServiceV2Transport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
- client. It won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
+ transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]):
+ The transport to use, or a Callable that constructs and returns a new transport.
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the ConfigServiceV2Transport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which have one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
+ to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that the ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
@@ -503,17 +708,38 @@ def __init__(
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
- if isinstance(client_options, dict):
- client_options = client_options_lib.from_dict(client_options)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
- client_options = cast(client_options_lib.ClientOptions, client_options)
+ self._client_options = client_options
+ if isinstance(self._client_options, dict):
+ self._client_options = client_options_lib.from_dict(self._client_options)
+ if self._client_options is None:
+ self._client_options = client_options_lib.ClientOptions()
+ self._client_options = cast(
+ client_options_lib.ClientOptions, self._client_options
+ )
- api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
- client_options
+ universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+ (
+ self._use_client_cert,
+ self._use_mtls_endpoint,
+ self._universe_domain_env,
+ ) = ConfigServiceV2Client._read_environment_variables()
+ self._client_cert_source = ConfigServiceV2Client._get_client_cert_source(
+ self._client_options.client_cert_source, self._use_client_cert
+ )
+ self._universe_domain = ConfigServiceV2Client._get_universe_domain(
+ universe_domain_opt, self._universe_domain_env
)
+ self._api_endpoint = None # updated below, depending on `transport`
+
+ # Initialize the universe domain validation.
+ self._is_universe_domain_valid = False
- api_key_value = getattr(client_options, "api_key", None)
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
+ api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
@@ -522,20 +748,33 @@ def __init__(
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
- if isinstance(transport, ConfigServiceV2Transport):
+ transport_provided = isinstance(transport, ConfigServiceV2Transport)
+ if transport_provided:
# transport is a ConfigServiceV2Transport instance.
- if credentials or client_options.credentials_file or api_key_value:
+ if credentials or self._client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
- if client_options.scopes:
+ if self._client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
- self._transport = transport
- else:
+ self._transport = cast(ConfigServiceV2Transport, transport)
+ self._api_endpoint = self._transport.host
+
+ self._api_endpoint = (
+ self._api_endpoint
+ or ConfigServiceV2Client._get_api_endpoint(
+ self._client_options.api_endpoint,
+ self._client_cert_source,
+ self._universe_domain,
+ self._use_mtls_endpoint,
+ )
+ )
+
+ if not transport_provided:
import google.auth._default # type: ignore
if api_key_value and hasattr(
@@ -545,19 +784,49 @@ def __init__(
api_key_value
)
- Transport = type(self).get_transport_class(transport)
- self._transport = Transport(
+ transport_init: Union[
+ Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport]
+ ] = (
+ ConfigServiceV2Client.get_transport_class(transport)
+ if isinstance(transport, str) or transport is None
+ else cast(Callable[..., ConfigServiceV2Transport], transport)
+ )
+ # initialize with the provided callable or the passed in class
+ self._transport = transport_init(
credentials=credentials,
- credentials_file=client_options.credentials_file,
- host=api_endpoint,
- scopes=client_options.scopes,
- client_cert_source_for_mtls=client_cert_source_func,
- quota_project_id=client_options.quota_project_id,
+ credentials_file=self._client_options.credentials_file,
+ host=self._api_endpoint,
+ scopes=self._client_options.scopes,
+ client_cert_source_for_mtls=self._client_cert_source,
+ quota_project_id=self._client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
- api_audience=client_options.api_audience,
+ api_audience=self._client_options.api_audience,
)
+ if "async" not in str(self._transport):
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.logging_v2.ConfigServiceV2Client`.",
+ extra={
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "universeDomain": getattr(
+ self._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._transport, "_credentials")
+ else {
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "credentialsType": None,
+ },
+ )
+
def list_buckets(
self,
request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None,
@@ -565,7 +834,7 @@ def list_buckets(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListBucketsPager:
r"""Lists log buckets.
@@ -620,8 +889,10 @@ def sample_list_buckets():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager:
@@ -633,19 +904,20 @@ def sample_list_buckets():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.ListBucketsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.ListBucketsRequest):
request = logging_config.ListBucketsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -663,6 +935,9 @@ def sample_list_buckets():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -677,6 +952,8 @@ def sample_list_buckets():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -689,7 +966,7 @@ def get_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogBucket:
r"""Gets a log bucket.
@@ -725,8 +1002,10 @@ def sample_get_bucket():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogBucket:
@@ -735,10 +1014,8 @@ def sample_get_bucket():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetBucketRequest):
request = logging_config.GetBucketRequest(request)
@@ -752,6 +1029,9 @@ def sample_get_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -769,7 +1049,7 @@ def create_bucket_async(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation.Operation:
r"""Creates a log bucket asynchronously that can be used
to store log entries.
@@ -813,8 +1093,10 @@ def sample_create_bucket_async():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation.Operation:
@@ -826,10 +1108,8 @@ def sample_create_bucket_async():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CreateBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CreateBucketRequest):
request = logging_config.CreateBucketRequest(request)
@@ -843,6 +1123,9 @@ def sample_create_bucket_async():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -868,7 +1151,7 @@ def update_bucket_async(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation.Operation:
r"""Updates a log bucket asynchronously.
@@ -914,8 +1197,10 @@ def sample_update_bucket_async():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation.Operation:
@@ -927,10 +1212,8 @@ def sample_update_bucket_async():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateBucketRequest):
request = logging_config.UpdateBucketRequest(request)
@@ -944,6 +1227,9 @@ def sample_update_bucket_async():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -969,7 +1255,7 @@ def create_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogBucket:
r"""Creates a log bucket that can be used to store log
entries. After a bucket has been created, the bucket's
@@ -1008,8 +1294,10 @@ def sample_create_bucket():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogBucket:
@@ -1018,10 +1306,8 @@ def sample_create_bucket():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CreateBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CreateBucketRequest):
request = logging_config.CreateBucketRequest(request)
@@ -1035,6 +1321,9 @@ def sample_create_bucket():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1052,7 +1341,7 @@ def update_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogBucket:
r"""Updates a log bucket.
@@ -1094,8 +1383,10 @@ def sample_update_bucket():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogBucket:
@@ -1104,10 +1395,8 @@ def sample_update_bucket():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateBucketRequest):
request = logging_config.UpdateBucketRequest(request)
@@ -1121,6 +1410,9 @@ def sample_update_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1138,7 +1430,7 @@ def delete_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a log bucket.
@@ -1176,14 +1468,14 @@ def sample_delete_bucket():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.DeleteBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.DeleteBucketRequest):
request = logging_config.DeleteBucketRequest(request)
@@ -1197,6 +1489,9 @@ def sample_delete_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -1211,7 +1506,7 @@ def undelete_bucket(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Undeletes a log bucket. A bucket that has been
deleted can be undeleted within the grace period of 7
@@ -1246,14 +1541,14 @@ def sample_undelete_bucket():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UndeleteBucketRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UndeleteBucketRequest):
request = logging_config.UndeleteBucketRequest(request)
@@ -1267,6 +1562,9 @@ def sample_undelete_bucket():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -1282,7 +1580,7 @@ def list_views(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListViewsPager:
r"""Lists views on a log bucket.
@@ -1329,8 +1627,10 @@ def sample_list_views():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager:
@@ -1342,19 +1642,20 @@ def sample_list_views():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.ListViewsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.ListViewsRequest):
request = logging_config.ListViewsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1372,6 +1673,9 @@ def sample_list_views():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1386,6 +1690,8 @@ def sample_list_views():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -1398,7 +1704,7 @@ def get_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogView:
r"""Gets a view on a log bucket..
@@ -1434,8 +1740,10 @@ def sample_get_view():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogView:
@@ -1444,10 +1752,8 @@ def sample_get_view():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetViewRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetViewRequest):
request = logging_config.GetViewRequest(request)
@@ -1461,6 +1767,9 @@ def sample_get_view():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1478,7 +1787,7 @@ def create_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogView:
r"""Creates a view over log entries in a log bucket. A
bucket may contain a maximum of 30 views.
@@ -1516,8 +1825,10 @@ def sample_create_view():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogView:
@@ -1526,10 +1837,8 @@ def sample_create_view():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CreateViewRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CreateViewRequest):
request = logging_config.CreateViewRequest(request)
@@ -1543,6 +1852,9 @@ def sample_create_view():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1560,7 +1872,7 @@ def update_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogView:
r"""Updates a view on a log bucket. This method replaces the
following fields in the existing view with values from the new
@@ -1600,8 +1912,10 @@ def sample_update_view():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogView:
@@ -1610,10 +1924,8 @@ def sample_update_view():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateViewRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateViewRequest):
request = logging_config.UpdateViewRequest(request)
@@ -1627,6 +1939,9 @@ def sample_update_view():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1644,7 +1959,7 @@ def delete_view(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is
returned, this indicates that system is not in a state where it
@@ -1680,14 +1995,14 @@ def sample_delete_view():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.DeleteViewRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.DeleteViewRequest):
request = logging_config.DeleteViewRequest(request)
@@ -1701,6 +2016,9 @@ def sample_delete_view():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -1716,7 +2034,7 @@ def list_sinks(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListSinksPager:
r"""Lists sinks.
@@ -1767,8 +2085,10 @@ def sample_list_sinks():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager:
@@ -1779,19 +2099,20 @@ def sample_list_sinks():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.ListSinksRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.ListSinksRequest):
request = logging_config.ListSinksRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1809,6 +2130,9 @@ def sample_list_sinks():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1823,6 +2147,8 @@ def sample_list_sinks():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -1836,7 +2162,7 @@ def get_sink(
sink_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogSink:
r"""Gets a sink.
@@ -1889,8 +2215,10 @@ def sample_get_sink():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogSink:
@@ -1906,19 +2234,20 @@ def sample_get_sink():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([sink_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [sink_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetSinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetSinkRequest):
request = logging_config.GetSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1938,6 +2267,9 @@ def sample_get_sink():
),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1957,7 +2289,7 @@ def create_sink(
sink: Optional[logging_config.LogSink] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogSink:
r"""Creates a sink that exports specified log entries to a
destination. The export of newly-ingested log entries begins
@@ -2026,8 +2358,10 @@ def sample_create_sink():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogSink:
@@ -2043,19 +2377,20 @@ def sample_create_sink():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, sink])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, sink]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CreateSinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CreateSinkRequest):
request = logging_config.CreateSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2075,6 +2410,9 @@ def sample_create_sink():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2095,7 +2433,7 @@ def update_sink(
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogSink:
r"""Updates a sink. This method replaces the following fields in the
existing sink with values from the new sink: ``destination``,
@@ -2188,8 +2526,10 @@ def sample_update_sink():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogSink:
@@ -2205,19 +2545,20 @@ def sample_update_sink():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([sink_name, sink, update_mask])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [sink_name, sink, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateSinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateSinkRequest):
request = logging_config.UpdateSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2241,6 +2582,9 @@ def sample_update_sink():
),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2259,7 +2603,7 @@ def delete_sink(
sink_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a sink. If the sink has a unique ``writer_identity``,
then that service account is also deleted.
@@ -2311,23 +2655,26 @@ def sample_delete_sink():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([sink_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [sink_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.DeleteSinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.DeleteSinkRequest):
request = logging_config.DeleteSinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2347,6 +2694,9 @@ def sample_delete_sink():
),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -2364,7 +2714,7 @@ def create_link(
link_id: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation.Operation:
r"""Asynchronously creates a linked dataset in BigQuery
which makes it possible to use BigQuery to read the logs
@@ -2435,8 +2785,10 @@ def sample_create_link():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation.Operation:
@@ -2448,19 +2800,20 @@ def sample_create_link():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, link, link_id])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, link, link_id]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CreateLinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CreateLinkRequest):
request = logging_config.CreateLinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2482,6 +2835,9 @@ def sample_create_link():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2508,7 +2864,7 @@ def delete_link(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation.Operation:
r"""Deletes a link. This will also delete the
corresponding BigQuery linked dataset.
@@ -2560,8 +2916,10 @@ def sample_delete_link():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation.Operation:
@@ -2580,19 +2938,20 @@ def sample_delete_link():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.DeleteLinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.DeleteLinkRequest):
request = logging_config.DeleteLinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2610,6 +2969,9 @@ def sample_delete_link():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2636,7 +2998,7 @@ def list_links(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLinksPager:
r"""Lists links.
@@ -2685,8 +3047,10 @@ def sample_list_links():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager:
@@ -2698,19 +3062,20 @@ def sample_list_links():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.ListLinksRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.ListLinksRequest):
request = logging_config.ListLinksRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2728,6 +3093,9 @@ def sample_list_links():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2742,6 +3110,8 @@ def sample_list_links():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -2755,7 +3125,7 @@ def get_link(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.Link:
r"""Gets a link.
@@ -2802,8 +3172,10 @@ def sample_get_link():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.Link:
@@ -2812,19 +3184,20 @@ def sample_get_link():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetLinkRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetLinkRequest):
request = logging_config.GetLinkRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2842,6 +3215,9 @@ def sample_get_link():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2860,7 +3236,7 @@ def list_exclusions(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListExclusionsPager:
r"""Lists all the exclusions on the \_Default sink in a parent
resource.
@@ -2912,8 +3288,10 @@ def sample_list_exclusions():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager:
@@ -2924,19 +3302,20 @@ def sample_list_exclusions():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.ListExclusionsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.ListExclusionsRequest):
request = logging_config.ListExclusionsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -2954,6 +3333,9 @@ def sample_list_exclusions():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -2968,6 +3350,8 @@ def sample_list_exclusions():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -2981,7 +3365,7 @@ def get_exclusion(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogExclusion:
r"""Gets the description of an exclusion in the \_Default sink.
@@ -3034,8 +3418,10 @@ def sample_get_exclusion():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogExclusion:
@@ -3049,19 +3435,20 @@ def sample_get_exclusion():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetExclusionRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetExclusionRequest):
request = logging_config.GetExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -3079,6 +3466,9 @@ def sample_get_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3098,7 +3488,7 @@ def create_exclusion(
exclusion: Optional[logging_config.LogExclusion] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogExclusion:
r"""Creates a new exclusion in the \_Default sink in a specified
parent resource. Only log entries belonging to that resource can
@@ -3168,8 +3558,10 @@ def sample_create_exclusion():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogExclusion:
@@ -3183,19 +3575,20 @@ def sample_create_exclusion():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, exclusion])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, exclusion]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CreateExclusionRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CreateExclusionRequest):
request = logging_config.CreateExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -3215,6 +3608,9 @@ def sample_create_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3235,7 +3631,7 @@ def update_exclusion(
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.LogExclusion:
r"""Changes one or more properties of an existing exclusion in the
\_Default sink.
@@ -3316,8 +3712,10 @@ def sample_update_exclusion():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogExclusion:
@@ -3331,19 +3729,20 @@ def sample_update_exclusion():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name, exclusion, update_mask])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name, exclusion, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateExclusionRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateExclusionRequest):
request = logging_config.UpdateExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -3365,6 +3764,9 @@ def sample_update_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3383,7 +3785,7 @@ def delete_exclusion(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes an exclusion in the \_Default sink.
@@ -3434,23 +3836,26 @@ def sample_delete_exclusion():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.DeleteExclusionRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.DeleteExclusionRequest):
request = logging_config.DeleteExclusionRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -3468,6 +3873,9 @@ def sample_delete_exclusion():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -3482,7 +3890,7 @@ def get_cmek_settings(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.CmekSettings:
r"""Gets the Logging CMEK settings for the given resource.
@@ -3532,8 +3940,10 @@ def sample_get_cmek_settings():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.CmekSettings:
@@ -3552,10 +3962,8 @@ def sample_get_cmek_settings():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetCmekSettingsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetCmekSettingsRequest):
request = logging_config.GetCmekSettingsRequest(request)
@@ -3569,6 +3977,9 @@ def sample_get_cmek_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3586,7 +3997,7 @@ def update_cmek_settings(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.CmekSettings:
r"""Updates the Log Router CMEK settings for the given resource.
@@ -3641,8 +4052,10 @@ def sample_update_cmek_settings():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.CmekSettings:
@@ -3661,10 +4074,8 @@ def sample_update_cmek_settings():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateCmekSettingsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateCmekSettingsRequest):
request = logging_config.UpdateCmekSettingsRequest(request)
@@ -3678,6 +4089,9 @@ def sample_update_cmek_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3696,7 +4110,7 @@ def get_settings(
name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.Settings:
r"""Gets the Log Router settings for the given resource.
@@ -3771,8 +4185,10 @@ def sample_get_settings():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.Settings:
@@ -3782,19 +4198,20 @@ def sample_get_settings():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.GetSettingsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.GetSettingsRequest):
request = logging_config.GetSettingsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -3812,6 +4229,9 @@ def sample_get_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3831,7 +4251,7 @@ def update_settings(
update_mask: Optional[field_mask_pb2.FieldMask] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_config.Settings:
r"""Updates the Log Router settings for the given resource.
@@ -3913,8 +4333,10 @@ def sample_update_settings():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.Settings:
@@ -3924,19 +4346,20 @@ def sample_update_settings():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([settings, update_mask])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [settings, update_mask]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.UpdateSettingsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.UpdateSettingsRequest):
request = logging_config.UpdateSettingsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -3956,6 +4379,9 @@ def sample_update_settings():
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -3973,7 +4399,7 @@ def copy_log_entries(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operation.Operation:
r"""Copies a set of log entries from a log bucket to a
Cloud Storage bucket.
@@ -4015,8 +4441,10 @@ def sample_copy_log_entries():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.api_core.operation.Operation:
@@ -4029,10 +4457,8 @@ def sample_copy_log_entries():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_config.CopyLogEntriesRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_config.CopyLogEntriesRequest):
request = logging_config.CopyLogEntriesRequest(request)
@@ -4040,6 +4466,9 @@ def sample_copy_log_entries():
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.copy_log_entries]
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -4078,7 +4507,7 @@ def list_operations(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
@@ -4089,8 +4518,10 @@ def list_operations(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
@@ -4103,11 +4534,7 @@ def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -4115,16 +4542,23 @@ def list_operations(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
+ # Validate the universe domain.
+ self._validate_universe_domain()
- # Done; return the response.
- return response
+ try:
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+ except core_exceptions.GoogleAPICallError as e:
+ self._add_cred_info_for_auth_errors(e)
+ raise e
def get_operation(
self,
@@ -4132,7 +4566,7 @@ def get_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -4143,8 +4577,10 @@ def get_operation(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
@@ -4157,11 +4593,7 @@ def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -4169,16 +4601,23 @@ def get_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
+ # Validate the universe domain.
+ self._validate_universe_domain()
- # Done; return the response.
- return response
+ try:
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+ except core_exceptions.GoogleAPICallError as e:
+ self._add_cred_info_for_auth_errors(e)
+ raise e
def cancel_operation(
self,
@@ -4186,7 +4625,7 @@ def cancel_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -4201,8 +4640,10 @@ def cancel_operation(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
@@ -4214,11 +4655,7 @@ def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -4226,6 +4663,9 @@ def cancel_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -4239,5 +4679,7 @@ def cancel_operation(
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
__all__ = ("ConfigServiceV2Client",)
diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py
index 4af8eaf1c..62906815e 100644
--- a/google/cloud/logging_v2/services/config_service_v2/pagers.py
+++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
@@ -22,8 +25,18 @@
Tuple,
Optional,
Iterator,
+ Union,
)
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+ OptionalAsyncRetry = Union[
+ retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None
+ ]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+ OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
+
from google.cloud.logging_v2.types import logging_config
@@ -51,7 +64,9 @@ def __init__(
request: logging_config.ListBucketsRequest,
response: logging_config.ListBucketsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -62,12 +77,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListBucketsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListBucketsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -78,7 +100,12 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[logging_config.LogBucket]:
@@ -113,7 +140,9 @@ def __init__(
request: logging_config.ListBucketsRequest,
response: logging_config.ListBucketsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -124,12 +153,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListBucketsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListBucketsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -140,7 +176,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]:
@@ -179,7 +220,9 @@ def __init__(
request: logging_config.ListViewsRequest,
response: logging_config.ListViewsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -190,12 +233,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListViewsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListViewsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -206,7 +256,12 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[logging_config.LogView]:
@@ -241,7 +296,9 @@ def __init__(
request: logging_config.ListViewsRequest,
response: logging_config.ListViewsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -252,12 +309,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListViewsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListViewsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -268,7 +332,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[logging_config.LogView]:
@@ -307,7 +376,9 @@ def __init__(
request: logging_config.ListSinksRequest,
response: logging_config.ListSinksResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -318,12 +389,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListSinksResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListSinksRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -334,7 +412,12 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[logging_config.LogSink]:
@@ -369,7 +452,9 @@ def __init__(
request: logging_config.ListSinksRequest,
response: logging_config.ListSinksResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -380,12 +465,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListSinksResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListSinksRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -396,7 +488,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[logging_config.LogSink]:
@@ -435,7 +532,9 @@ def __init__(
request: logging_config.ListLinksRequest,
response: logging_config.ListLinksResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -446,12 +545,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLinksResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListLinksRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -462,7 +568,12 @@ def pages(self) -> Iterator[logging_config.ListLinksResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[logging_config.Link]:
@@ -497,7 +608,9 @@ def __init__(
request: logging_config.ListLinksRequest,
response: logging_config.ListLinksResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -508,12 +621,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLinksResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListLinksRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -524,7 +644,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[logging_config.Link]:
@@ -563,7 +688,9 @@ def __init__(
request: logging_config.ListExclusionsRequest,
response: logging_config.ListExclusionsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -574,12 +701,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListExclusionsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListExclusionsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -590,7 +724,12 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[logging_config.LogExclusion]:
@@ -625,7 +764,9 @@ def __init__(
request: logging_config.ListExclusionsRequest,
response: logging_config.ListExclusionsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -636,12 +777,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListExclusionsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_config.ListExclusionsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -652,7 +800,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]:
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/README.rst b/google/cloud/logging_v2/services/config_service_v2/transports/README.rst
new file mode 100644
index 000000000..4ea848796
--- /dev/null
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`ConfigServiceV2Transport` is the ABC for all transports.
+- public child `ConfigServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `ConfigServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseConfigServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `ConfigServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py
index fd02975e4..6f8979ef8 100644
--- a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py
index 73db34bed..db7b93b85 100644
--- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -26,6 +26,7 @@
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
from google.cloud.logging_v2.types import logging_config
from google.longrunning import operations_pb2 # type: ignore
@@ -35,6 +36,9 @@
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class ConfigServiceV2Transport(abc.ABC):
"""Abstract transport class for ConfigServiceV2."""
@@ -65,7 +69,7 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -90,6 +94,8 @@ def __init__(
# Save the scopes.
self._scopes = scopes
+ if not hasattr(self, "_ignore_credentials"):
+ self._ignore_credentials: bool = False
# If no credentials are provided, then determine the appropriate
# defaults.
@@ -102,7 +108,7 @@ def __init__(
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
- elif credentials is None:
+ elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
@@ -128,6 +134,10 @@ def __init__(
host += ":443"
self._host = host
+ @property
+ def host(self):
+ return self._host
+
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -368,6 +378,21 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=None,
client_info=client_info,
),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
def close(self):
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
index b82203cf6..4dee4e647 100644
--- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -22,14 +25,91 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.logging_v2.types import logging_config
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+ # Convert the gRPC trailing metadata into a dict of str key/value pairs (None if absent)
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport):
"""gRPC backend transport for ConfigServiceV2.
@@ -53,7 +133,7 @@ def __init__(
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
- channel: Optional[grpc.Channel] = None,
+ channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -67,20 +147,23 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
- ignored if ``channel`` is provided.
- channel (Optional[grpc.Channel]): A ``Channel`` instance through
- which to make calls.
+ ignored if a ``channel`` instance is provided.
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
@@ -90,11 +173,11 @@ def __init__(
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -121,9 +204,10 @@ def __init__(
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- if channel:
+ if isinstance(channel, grpc.Channel):
# Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
@@ -162,7 +246,9 @@ def __init__(
)
if not self._grpc_channel:
- self._grpc_channel = type(self).create_channel(
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
self._host,
# use the credentials which are saved
credentials=self._credentials,
@@ -178,7 +264,12 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientInterceptor()
+ self._logged_channel = grpc.intercept_channel(
+ self._grpc_channel, self._interceptor
+ )
+
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
@@ -242,7 +333,9 @@ def operations_client(self) -> operations_v1.OperationsClient:
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
- self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
+ self._operations_client = operations_v1.OperationsClient(
+ self._logged_channel
+ )
# Return the client from cache.
return self._operations_client
@@ -268,7 +361,7 @@ def list_buckets(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_buckets" not in self._stubs:
- self._stubs["list_buckets"] = self.grpc_channel.unary_unary(
+ self._stubs["list_buckets"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListBuckets",
request_serializer=logging_config.ListBucketsRequest.serialize,
response_deserializer=logging_config.ListBucketsResponse.deserialize,
@@ -294,7 +387,7 @@ def get_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_bucket" not in self._stubs:
- self._stubs["get_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["get_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetBucket",
request_serializer=logging_config.GetBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
@@ -323,7 +416,7 @@ def create_bucket_async(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_bucket_async" not in self._stubs:
- self._stubs["create_bucket_async"] = self.grpc_channel.unary_unary(
+ self._stubs["create_bucket_async"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateBucketAsync",
request_serializer=logging_config.CreateBucketRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -355,7 +448,7 @@ def update_bucket_async(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_bucket_async" not in self._stubs:
- self._stubs["update_bucket_async"] = self.grpc_channel.unary_unary(
+ self._stubs["update_bucket_async"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateBucketAsync",
request_serializer=logging_config.UpdateBucketRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -383,7 +476,7 @@ def create_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_bucket" not in self._stubs:
- self._stubs["create_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["create_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateBucket",
request_serializer=logging_config.CreateBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
@@ -415,7 +508,7 @@ def update_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_bucket" not in self._stubs:
- self._stubs["update_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["update_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateBucket",
request_serializer=logging_config.UpdateBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
@@ -446,7 +539,7 @@ def delete_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_bucket" not in self._stubs:
- self._stubs["delete_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteBucket",
request_serializer=logging_config.DeleteBucketRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -474,7 +567,7 @@ def undelete_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "undelete_bucket" not in self._stubs:
- self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["undelete_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UndeleteBucket",
request_serializer=logging_config.UndeleteBucketRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -500,7 +593,7 @@ def list_views(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_views" not in self._stubs:
- self._stubs["list_views"] = self.grpc_channel.unary_unary(
+ self._stubs["list_views"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListViews",
request_serializer=logging_config.ListViewsRequest.serialize,
response_deserializer=logging_config.ListViewsResponse.deserialize,
@@ -526,7 +619,7 @@ def get_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_view" not in self._stubs:
- self._stubs["get_view"] = self.grpc_channel.unary_unary(
+ self._stubs["get_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetView",
request_serializer=logging_config.GetViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
@@ -553,7 +646,7 @@ def create_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_view" not in self._stubs:
- self._stubs["create_view"] = self.grpc_channel.unary_unary(
+ self._stubs["create_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateView",
request_serializer=logging_config.CreateViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
@@ -583,7 +676,7 @@ def update_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_view" not in self._stubs:
- self._stubs["update_view"] = self.grpc_channel.unary_unary(
+ self._stubs["update_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateView",
request_serializer=logging_config.UpdateViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
@@ -612,7 +705,7 @@ def delete_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_view" not in self._stubs:
- self._stubs["delete_view"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteView",
request_serializer=logging_config.DeleteViewRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -638,7 +731,7 @@ def list_sinks(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_sinks" not in self._stubs:
- self._stubs["list_sinks"] = self.grpc_channel.unary_unary(
+ self._stubs["list_sinks"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListSinks",
request_serializer=logging_config.ListSinksRequest.serialize,
response_deserializer=logging_config.ListSinksResponse.deserialize,
@@ -664,7 +757,7 @@ def get_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_sink" not in self._stubs:
- self._stubs["get_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["get_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetSink",
request_serializer=logging_config.GetSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
@@ -694,7 +787,7 @@ def create_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_sink" not in self._stubs:
- self._stubs["create_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["create_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateSink",
request_serializer=logging_config.CreateSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
@@ -725,7 +818,7 @@ def update_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_sink" not in self._stubs:
- self._stubs["update_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["update_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateSink",
request_serializer=logging_config.UpdateSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
@@ -752,7 +845,7 @@ def delete_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_sink" not in self._stubs:
- self._stubs["delete_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteSink",
request_serializer=logging_config.DeleteSinkRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -781,7 +874,7 @@ def create_link(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_link" not in self._stubs:
- self._stubs["create_link"] = self.grpc_channel.unary_unary(
+ self._stubs["create_link"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateLink",
request_serializer=logging_config.CreateLinkRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -808,7 +901,7 @@ def delete_link(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_link" not in self._stubs:
- self._stubs["delete_link"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_link"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteLink",
request_serializer=logging_config.DeleteLinkRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -834,7 +927,7 @@ def list_links(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_links" not in self._stubs:
- self._stubs["list_links"] = self.grpc_channel.unary_unary(
+ self._stubs["list_links"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListLinks",
request_serializer=logging_config.ListLinksRequest.serialize,
response_deserializer=logging_config.ListLinksResponse.deserialize,
@@ -860,7 +953,7 @@ def get_link(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_link" not in self._stubs:
- self._stubs["get_link"] = self.grpc_channel.unary_unary(
+ self._stubs["get_link"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetLink",
request_serializer=logging_config.GetLinkRequest.serialize,
response_deserializer=logging_config.Link.deserialize,
@@ -889,7 +982,7 @@ def list_exclusions(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_exclusions" not in self._stubs:
- self._stubs["list_exclusions"] = self.grpc_channel.unary_unary(
+ self._stubs["list_exclusions"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListExclusions",
request_serializer=logging_config.ListExclusionsRequest.serialize,
response_deserializer=logging_config.ListExclusionsResponse.deserialize,
@@ -915,7 +1008,7 @@ def get_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_exclusion" not in self._stubs:
- self._stubs["get_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["get_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetExclusion",
request_serializer=logging_config.GetExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
@@ -943,7 +1036,7 @@ def create_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_exclusion" not in self._stubs:
- self._stubs["create_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["create_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateExclusion",
request_serializer=logging_config.CreateExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
@@ -970,7 +1063,7 @@ def update_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_exclusion" not in self._stubs:
- self._stubs["update_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["update_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateExclusion",
request_serializer=logging_config.UpdateExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
@@ -996,7 +1089,7 @@ def delete_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_exclusion" not in self._stubs:
- self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteExclusion",
request_serializer=logging_config.DeleteExclusionRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -1031,7 +1124,7 @@ def get_cmek_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_cmek_settings" not in self._stubs:
- self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetCmekSettings",
request_serializer=logging_config.GetCmekSettingsRequest.serialize,
response_deserializer=logging_config.CmekSettings.deserialize,
@@ -1073,7 +1166,7 @@ def update_cmek_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_cmek_settings" not in self._stubs:
- self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateCmekSettings",
request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
response_deserializer=logging_config.CmekSettings.deserialize,
@@ -1109,7 +1202,7 @@ def get_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_settings" not in self._stubs:
- self._stubs["get_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["get_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetSettings",
request_serializer=logging_config.GetSettingsRequest.serialize,
response_deserializer=logging_config.Settings.deserialize,
@@ -1152,7 +1245,7 @@ def update_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_settings" not in self._stubs:
- self._stubs["update_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["update_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateSettings",
request_serializer=logging_config.UpdateSettingsRequest.serialize,
response_deserializer=logging_config.Settings.deserialize,
@@ -1179,7 +1272,7 @@ def copy_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "copy_log_entries" not in self._stubs:
- self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary(
+ self._stubs["copy_log_entries"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CopyLogEntries",
request_serializer=logging_config.CopyLogEntriesRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -1187,7 +1280,7 @@ def copy_log_entries(
return self._stubs["copy_log_entries"]
def close(self):
- self.grpc_channel.close()
+ self._logged_channel.close()
@property
def cancel_operation(
@@ -1199,7 +1292,7 @@ def cancel_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
@@ -1216,7 +1309,7 @@ def get_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["get_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
@@ -1235,7 +1328,7 @@ def list_operations(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ self._stubs["list_operations"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
index f37ba9cb1..2686f80e3 100644
--- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
+++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,16 +13,25 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
+import json
+import pickle
+import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.logging_v2.types import logging_config
@@ -31,6 +40,82 @@
from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO
from .grpc import ConfigServiceV2GrpcTransport
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+ grpc.aio.UnaryUnaryClientInterceptor
+): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+            # Convert gRPC trailing metadata to a dict of key/value string pairs
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra={
+ "serviceName": "google.logging.v2.ConfigServiceV2",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport):
"""gRPC AsyncIO backend transport for ConfigServiceV2.
@@ -68,7 +153,6 @@ def create_channel(
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -98,7 +182,7 @@ def __init__(
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
- channel: Optional[aio.Channel] = None,
+ channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -112,21 +196,24 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
- channel (Optional[aio.Channel]): A ``Channel`` instance through
- which to make calls.
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
@@ -136,11 +223,11 @@ def __init__(
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -167,9 +254,10 @@ def __init__(
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- if channel:
+ if isinstance(channel, aio.Channel):
# Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
@@ -207,7 +295,9 @@ def __init__(
)
if not self._grpc_channel:
- self._grpc_channel = type(self).create_channel(
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
self._host,
# use the credentials which are saved
credentials=self._credentials,
@@ -223,7 +313,13 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@property
@@ -246,7 +342,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient:
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
- self.grpc_channel
+ self._logged_channel
)
# Return the client from cache.
@@ -274,7 +370,7 @@ def list_buckets(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_buckets" not in self._stubs:
- self._stubs["list_buckets"] = self.grpc_channel.unary_unary(
+ self._stubs["list_buckets"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListBuckets",
request_serializer=logging_config.ListBucketsRequest.serialize,
response_deserializer=logging_config.ListBucketsResponse.deserialize,
@@ -302,7 +398,7 @@ def get_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_bucket" not in self._stubs:
- self._stubs["get_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["get_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetBucket",
request_serializer=logging_config.GetBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
@@ -333,7 +429,7 @@ def create_bucket_async(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_bucket_async" not in self._stubs:
- self._stubs["create_bucket_async"] = self.grpc_channel.unary_unary(
+ self._stubs["create_bucket_async"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateBucketAsync",
request_serializer=logging_config.CreateBucketRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -367,7 +463,7 @@ def update_bucket_async(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_bucket_async" not in self._stubs:
- self._stubs["update_bucket_async"] = self.grpc_channel.unary_unary(
+ self._stubs["update_bucket_async"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateBucketAsync",
request_serializer=logging_config.UpdateBucketRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -397,7 +493,7 @@ def create_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_bucket" not in self._stubs:
- self._stubs["create_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["create_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateBucket",
request_serializer=logging_config.CreateBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
@@ -431,7 +527,7 @@ def update_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_bucket" not in self._stubs:
- self._stubs["update_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["update_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateBucket",
request_serializer=logging_config.UpdateBucketRequest.serialize,
response_deserializer=logging_config.LogBucket.deserialize,
@@ -462,7 +558,7 @@ def delete_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_bucket" not in self._stubs:
- self._stubs["delete_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteBucket",
request_serializer=logging_config.DeleteBucketRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -490,7 +586,7 @@ def undelete_bucket(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "undelete_bucket" not in self._stubs:
- self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary(
+ self._stubs["undelete_bucket"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UndeleteBucket",
request_serializer=logging_config.UndeleteBucketRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -518,7 +614,7 @@ def list_views(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_views" not in self._stubs:
- self._stubs["list_views"] = self.grpc_channel.unary_unary(
+ self._stubs["list_views"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListViews",
request_serializer=logging_config.ListViewsRequest.serialize,
response_deserializer=logging_config.ListViewsResponse.deserialize,
@@ -544,7 +640,7 @@ def get_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_view" not in self._stubs:
- self._stubs["get_view"] = self.grpc_channel.unary_unary(
+ self._stubs["get_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetView",
request_serializer=logging_config.GetViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
@@ -573,7 +669,7 @@ def create_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_view" not in self._stubs:
- self._stubs["create_view"] = self.grpc_channel.unary_unary(
+ self._stubs["create_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateView",
request_serializer=logging_config.CreateViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
@@ -605,7 +701,7 @@ def update_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_view" not in self._stubs:
- self._stubs["update_view"] = self.grpc_channel.unary_unary(
+ self._stubs["update_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateView",
request_serializer=logging_config.UpdateViewRequest.serialize,
response_deserializer=logging_config.LogView.deserialize,
@@ -634,7 +730,7 @@ def delete_view(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_view" not in self._stubs:
- self._stubs["delete_view"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_view"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteView",
request_serializer=logging_config.DeleteViewRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -662,7 +758,7 @@ def list_sinks(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_sinks" not in self._stubs:
- self._stubs["list_sinks"] = self.grpc_channel.unary_unary(
+ self._stubs["list_sinks"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListSinks",
request_serializer=logging_config.ListSinksRequest.serialize,
response_deserializer=logging_config.ListSinksResponse.deserialize,
@@ -688,7 +784,7 @@ def get_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_sink" not in self._stubs:
- self._stubs["get_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["get_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetSink",
request_serializer=logging_config.GetSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
@@ -720,7 +816,7 @@ def create_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_sink" not in self._stubs:
- self._stubs["create_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["create_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateSink",
request_serializer=logging_config.CreateSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
@@ -753,7 +849,7 @@ def update_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_sink" not in self._stubs:
- self._stubs["update_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["update_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateSink",
request_serializer=logging_config.UpdateSinkRequest.serialize,
response_deserializer=logging_config.LogSink.deserialize,
@@ -780,7 +876,7 @@ def delete_sink(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_sink" not in self._stubs:
- self._stubs["delete_sink"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_sink"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteSink",
request_serializer=logging_config.DeleteSinkRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -811,7 +907,7 @@ def create_link(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_link" not in self._stubs:
- self._stubs["create_link"] = self.grpc_channel.unary_unary(
+ self._stubs["create_link"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateLink",
request_serializer=logging_config.CreateLinkRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -840,7 +936,7 @@ def delete_link(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_link" not in self._stubs:
- self._stubs["delete_link"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_link"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteLink",
request_serializer=logging_config.DeleteLinkRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
@@ -868,7 +964,7 @@ def list_links(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_links" not in self._stubs:
- self._stubs["list_links"] = self.grpc_channel.unary_unary(
+ self._stubs["list_links"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListLinks",
request_serializer=logging_config.ListLinksRequest.serialize,
response_deserializer=logging_config.ListLinksResponse.deserialize,
@@ -894,7 +990,7 @@ def get_link(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_link" not in self._stubs:
- self._stubs["get_link"] = self.grpc_channel.unary_unary(
+ self._stubs["get_link"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetLink",
request_serializer=logging_config.GetLinkRequest.serialize,
response_deserializer=logging_config.Link.deserialize,
@@ -924,7 +1020,7 @@ def list_exclusions(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_exclusions" not in self._stubs:
- self._stubs["list_exclusions"] = self.grpc_channel.unary_unary(
+ self._stubs["list_exclusions"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/ListExclusions",
request_serializer=logging_config.ListExclusionsRequest.serialize,
response_deserializer=logging_config.ListExclusionsResponse.deserialize,
@@ -952,7 +1048,7 @@ def get_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_exclusion" not in self._stubs:
- self._stubs["get_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["get_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetExclusion",
request_serializer=logging_config.GetExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
@@ -982,7 +1078,7 @@ def create_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_exclusion" not in self._stubs:
- self._stubs["create_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["create_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CreateExclusion",
request_serializer=logging_config.CreateExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
@@ -1011,7 +1107,7 @@ def update_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_exclusion" not in self._stubs:
- self._stubs["update_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["update_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateExclusion",
request_serializer=logging_config.UpdateExclusionRequest.serialize,
response_deserializer=logging_config.LogExclusion.deserialize,
@@ -1037,7 +1133,7 @@ def delete_exclusion(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_exclusion" not in self._stubs:
- self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_exclusion"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/DeleteExclusion",
request_serializer=logging_config.DeleteExclusionRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -1074,7 +1170,7 @@ def get_cmek_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_cmek_settings" not in self._stubs:
- self._stubs["get_cmek_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["get_cmek_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetCmekSettings",
request_serializer=logging_config.GetCmekSettingsRequest.serialize,
response_deserializer=logging_config.CmekSettings.deserialize,
@@ -1117,7 +1213,7 @@ def update_cmek_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_cmek_settings" not in self._stubs:
- self._stubs["update_cmek_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["update_cmek_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateCmekSettings",
request_serializer=logging_config.UpdateCmekSettingsRequest.serialize,
response_deserializer=logging_config.CmekSettings.deserialize,
@@ -1155,7 +1251,7 @@ def get_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_settings" not in self._stubs:
- self._stubs["get_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["get_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/GetSettings",
request_serializer=logging_config.GetSettingsRequest.serialize,
response_deserializer=logging_config.Settings.deserialize,
@@ -1200,7 +1296,7 @@ def update_settings(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_settings" not in self._stubs:
- self._stubs["update_settings"] = self.grpc_channel.unary_unary(
+ self._stubs["update_settings"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/UpdateSettings",
request_serializer=logging_config.UpdateSettingsRequest.serialize,
response_deserializer=logging_config.Settings.deserialize,
@@ -1229,15 +1325,281 @@ def copy_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "copy_log_entries" not in self._stubs:
- self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary(
+ self._stubs["copy_log_entries"] = self._logged_channel.unary_unary(
"/google.logging.v2.ConfigServiceV2/CopyLogEntries",
request_serializer=logging_config.CopyLogEntriesRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["copy_log_entries"]
+ def _prep_wrapped_messages(self, client_info):
+ """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+ self._wrapped_methods = {
+ self.list_buckets: self._wrap_method(
+ self.list_buckets,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_bucket: self._wrap_method(
+ self.get_bucket,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.create_bucket_async: self._wrap_method(
+ self.create_bucket_async,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_bucket_async: self._wrap_method(
+ self.update_bucket_async,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.create_bucket: self._wrap_method(
+ self.create_bucket,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_bucket: self._wrap_method(
+ self.update_bucket,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_bucket: self._wrap_method(
+ self.delete_bucket,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.undelete_bucket: self._wrap_method(
+ self.undelete_bucket,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_views: self._wrap_method(
+ self.list_views,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_view: self._wrap_method(
+ self.get_view,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.create_view: self._wrap_method(
+ self.create_view,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_view: self._wrap_method(
+ self.update_view,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_view: self._wrap_method(
+ self.delete_view,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_sinks: self._wrap_method(
+ self.list_sinks,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_sink: self._wrap_method(
+ self.get_sink,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.create_sink: self._wrap_method(
+ self.create_sink,
+ default_timeout=120.0,
+ client_info=client_info,
+ ),
+ self.update_sink: self._wrap_method(
+ self.update_sink,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.delete_sink: self._wrap_method(
+ self.delete_sink,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.create_link: self._wrap_method(
+ self.create_link,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.delete_link: self._wrap_method(
+ self.delete_link,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_links: self._wrap_method(
+ self.list_links,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_link: self._wrap_method(
+ self.get_link,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_exclusions: self._wrap_method(
+ self.list_exclusions,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_exclusion: self._wrap_method(
+ self.get_exclusion,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.create_exclusion: self._wrap_method(
+ self.create_exclusion,
+ default_timeout=120.0,
+ client_info=client_info,
+ ),
+ self.update_exclusion: self._wrap_method(
+ self.update_exclusion,
+ default_timeout=120.0,
+ client_info=client_info,
+ ),
+ self.delete_exclusion: self._wrap_method(
+ self.delete_exclusion,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_cmek_settings: self._wrap_method(
+ self.get_cmek_settings,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_cmek_settings: self._wrap_method(
+ self.update_cmek_settings,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_settings: self._wrap_method(
+ self.get_settings,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.update_settings: self._wrap_method(
+ self.update_settings,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.copy_log_entries: self._wrap_method(
+ self.copy_log_entries,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.cancel_operation: self._wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: self._wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: self._wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
- return self.grpc_channel.close()
+ return self._logged_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
@property
def cancel_operation(
@@ -1249,7 +1611,7 @@ def cancel_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
@@ -1266,7 +1628,7 @@ def get_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["get_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
@@ -1285,7 +1647,7 @@ def list_operations(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ self._stubs["list_operations"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
diff --git a/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/__init__.py
index 134609c93..41c0dc4fa 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/__init__.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py
index dcf622ac2..8de507845 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,11 +13,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging as std_logging
from collections import OrderedDict
-import functools
import re
from typing import (
Dict,
+ Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -36,14 +37,16 @@
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
-from google.api_core import retry as retries
+from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
+
try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+ OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
+ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
from google.api import monitored_resource_pb2 # type: ignore
from google.cloud.logging_v2.services.logging_service_v2 import pagers
@@ -54,14 +57,27 @@
from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport
from .client import LoggingServiceV2Client
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
class LoggingServiceV2AsyncClient:
"""Service for ingesting and querying logs."""
_client: LoggingServiceV2Client
+ # Copy defaults from the synchronous client for use here.
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ _DEFAULT_ENDPOINT_TEMPLATE = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE
+ _DEFAULT_UNIVERSE = LoggingServiceV2Client._DEFAULT_UNIVERSE
log_path = staticmethod(LoggingServiceV2Client.log_path)
parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path)
@@ -168,19 +184,40 @@ def transport(self) -> LoggingServiceV2Transport:
"""
return self._client.transport
- get_transport_class = functools.partial(
- type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)
- )
+ @property
+ def api_endpoint(self):
+ """Return the API endpoint used by the client instance.
+
+ Returns:
+ str: The API endpoint used by the client instance.
+ """
+ return self._client._api_endpoint
+
+ @property
+ def universe_domain(self) -> str:
+ """Return the universe domain used by the client instance.
+
+ Returns:
+ str: The universe domain used
+ by the client instance.
+ """
+ return self._client._universe_domain
+
+ get_transport_class = LoggingServiceV2Client.get_transport_class
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio",
+ transport: Optional[
+ Union[
+ str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]
+ ]
+ ] = "grpc_asyncio",
client_options: Optional[ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiates the logging service v2 client.
+ """Instantiates the logging service v2 async client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -188,26 +225,43 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.LoggingServiceV2Transport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
+ transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]):
+ The transport to use, or a Callable that constructs and returns a new transport to use.
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the LoggingServiceV2Transport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which have one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
+ to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
@@ -219,6 +273,28 @@ def __init__(
client_info=client_info,
)
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.",
+ extra={
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "universeDomain": getattr(
+ self._client._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._client._transport, "_credentials")
+ else {
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "credentialsType": None,
+ },
+ )
+
async def delete_log(
self,
request: Optional[Union[logging.DeleteLogRequest, dict]] = None,
@@ -226,7 +302,7 @@ async def delete_log(
log_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes all the log entries in a log for the \_Default Log
Bucket. The log reappears if it receives new entries. Log
@@ -278,23 +354,31 @@ async def sample_delete_log():
This corresponds to the ``log_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([log_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [log_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging.DeleteLogRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging.DeleteLogRequest):
+ request = logging.DeleteLogRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -303,22 +387,9 @@ async def sample_delete_log():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_log,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_log
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -326,6 +397,9 @@ async def sample_delete_log():
gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -344,7 +418,7 @@ async def write_log_entries(
entries: Optional[MutableSequence[log_entry.LogEntry]] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging.WriteLogEntriesResponse:
r"""Writes log entries to Logging. This API method is the
only way to send log entries to Logging. This method is
@@ -472,27 +546,35 @@ async def sample_write_log_entries():
This corresponds to the ``entries`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.WriteLogEntriesResponse:
Result returned from WriteLogEntries.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([log_name, resource, labels, entries])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [log_name, resource, labels, entries]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging.WriteLogEntriesRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging.WriteLogEntriesRequest):
+ request = logging.WriteLogEntriesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -508,22 +590,12 @@ async def sample_write_log_entries():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.write_log_entries,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.write_log_entries
+ ]
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
# Send the request.
response = await rpc(
@@ -545,7 +617,7 @@ async def list_log_entries(
order_by: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLogEntriesAsyncPager:
r"""Lists log entries. Use this method to retrieve log entries that
originated from a project/folder/organization/billing account.
@@ -629,11 +701,13 @@ async def sample_list_log_entries():
This corresponds to the ``order_by`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager:
@@ -644,16 +718,22 @@ async def sample_list_log_entries():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([resource_names, filter, order_by])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource_names, filter, order_by]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging.ListLogEntriesRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging.ListLogEntriesRequest):
+ request = logging.ListLogEntriesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -666,22 +746,12 @@ async def sample_list_log_entries():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_log_entries,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_log_entries
+ ]
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
# Send the request.
response = await rpc(
@@ -697,6 +767,8 @@ async def sample_list_log_entries():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -711,7 +783,7 @@ async def list_monitored_resource_descriptors(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListMonitoredResourceDescriptorsAsyncPager:
r"""Lists the descriptors for monitored resource types
used by Logging.
@@ -746,11 +818,13 @@ async def sample_list_monitored_resource_descriptors():
request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]):
The request object. The parameters to
ListMonitoredResourceDescriptors
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager:
@@ -762,26 +836,19 @@ async def sample_list_monitored_resource_descriptors():
"""
# Create or coerce a protobuf request object.
- request = logging.ListMonitoredResourceDescriptorsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest):
+ request = logging.ListMonitoredResourceDescriptorsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_monitored_resource_descriptors,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_monitored_resource_descriptors
+ ]
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
# Send the request.
response = await rpc(
@@ -797,6 +864,8 @@ async def sample_list_monitored_resource_descriptors():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -810,7 +879,7 @@ async def list_logs(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLogsAsyncPager:
r"""Lists the logs in projects, organizations, folders,
or billing accounts. Only logs that have entries are
@@ -857,11 +926,13 @@ async def sample_list_logs():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager:
@@ -873,16 +944,22 @@ async def sample_list_logs():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging.ListLogsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging.ListLogsRequest):
+ request = logging.ListLogsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -891,22 +968,9 @@ async def sample_list_logs():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_logs,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_logs
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -914,6 +978,9 @@ async def sample_list_logs():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -928,6 +995,8 @@ async def sample_list_logs():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -940,7 +1009,7 @@ def tail_log_entries(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]:
r"""Streaming read of log entries as they are ingested.
Until the stream is terminated, it will continue reading
@@ -986,11 +1055,13 @@ def request_generator():
Args:
requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]):
The request object AsyncIterator. The parameters to ``TailLogEntries``.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]:
@@ -999,22 +1070,12 @@ def request_generator():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.tail_log_entries,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=3600.0,
- ),
- default_timeout=3600.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.tail_log_entries
+ ]
+
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
# Send the request.
response = rpc(
@@ -1033,7 +1094,7 @@ async def list_operations(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
@@ -1041,11 +1102,13 @@ async def list_operations(
request (:class:`~.operations_pb2.ListOperationsRequest`):
The request object. Request message for
`ListOperations` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
@@ -1058,11 +1121,7 @@ async def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1070,6 +1129,9 @@ async def list_operations(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1087,7 +1149,7 @@ async def get_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -1095,11 +1157,13 @@ async def get_operation(
request (:class:`~.operations_pb2.GetOperationRequest`):
The request object. Request message for
`GetOperation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
@@ -1112,11 +1176,7 @@ async def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1124,6 +1184,9 @@ async def get_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -1141,7 +1204,7 @@ async def cancel_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -1153,11 +1216,13 @@ async def cancel_operation(
request (:class:`~.operations_pb2.CancelOperationRequest`):
The request object. Request message for
`CancelOperation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
@@ -1169,11 +1234,7 @@ async def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1181,6 +1242,9 @@ async def cancel_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -1200,5 +1264,8 @@ async def __aexit__(self, exc_type, exc, tb):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
__all__ = ("LoggingServiceV2AsyncClient",)
diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py
index ce60602c6..22318f07a 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/client.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,10 +14,14 @@
# limitations under the License.
#
from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
import os
import re
from typing import (
Dict,
+ Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -30,6 +34,7 @@
Union,
cast,
)
+import warnings
from google.cloud.logging_v2 import gapic_version as package_version
@@ -42,11 +47,21 @@
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
from google.api import monitored_resource_pb2 # type: ignore
from google.cloud.logging_v2.services.logging_service_v2 import pagers
@@ -127,11 +142,15 @@ def _get_default_mtls_endpoint(api_endpoint):
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = "logging.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
+ _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}"
+ _DEFAULT_UNIVERSE = "googleapis.com"
+
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -277,7 +296,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
- """Return the API endpoint and client cert source for mutual TLS.
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
@@ -307,6 +326,11 @@ def get_mtls_endpoint_and_cert_source(
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
+
+ warnings.warn(
+ "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+ DeprecationWarning,
+ )
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
@@ -340,11 +364,180 @@ def get_mtls_endpoint_and_cert_source(
return api_endpoint, client_cert_source
+ @staticmethod
+ def _read_environment_variables():
+ """Returns the environment variables used by the client.
+
+ Returns:
+ Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+ GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+ Raises:
+ ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+ any of ["true", "false"].
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+ is not any of ["auto", "never", "always"].
+ """
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+ universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+ return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+ @staticmethod
+ def _get_client_cert_source(provided_cert_source, use_cert_flag):
+ """Return the client cert source to be used by the client.
+
+ Args:
+ provided_cert_source (bytes): The client certificate source provided.
+ use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+ Returns:
+ bytes or None: The client cert source to be used by the client.
+ """
+ client_cert_source = None
+ if use_cert_flag:
+ if provided_cert_source:
+ client_cert_source = provided_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+ return client_cert_source
+
+ @staticmethod
+ def _get_api_endpoint(
+ api_override, client_cert_source, universe_domain, use_mtls_endpoint
+ ):
+ """Return the API endpoint used by the client.
+
+ Args:
+ api_override (str): The API endpoint override. If specified, this is always
+ the return value of this function and the other arguments are not used.
+ client_cert_source (bytes): The client certificate source used by the client.
+ universe_domain (str): The universe domain used by the client.
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+ Possible values are "always", "auto", or "never".
+
+ Returns:
+ str: The API endpoint to be used by the client.
+ """
+ if api_override is not None:
+ api_endpoint = api_override
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE
+ if universe_domain != _default_universe:
+ raise MutualTLSChannelError(
+ f"mTLS is not supported in any universe other than {_default_universe}."
+ )
+ api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=universe_domain
+ )
+ return api_endpoint
+
+ @staticmethod
+ def _get_universe_domain(
+ client_universe_domain: Optional[str], universe_domain_env: Optional[str]
+ ) -> str:
+ """Return the universe domain used by the client.
+
+ Args:
+ client_universe_domain (Optional[str]): The universe domain configured via the client options.
+ universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+ Returns:
+ str: The universe domain to be used by the client.
+
+ Raises:
+ ValueError: If the universe domain is an empty string.
+ """
+ universe_domain = LoggingServiceV2Client._DEFAULT_UNIVERSE
+ if client_universe_domain is not None:
+ universe_domain = client_universe_domain
+ elif universe_domain_env is not None:
+ universe_domain = universe_domain_env
+ if len(universe_domain.strip()) == 0:
+ raise ValueError("Universe Domain cannot be an empty string.")
+ return universe_domain
+
+ def _validate_universe_domain(self):
+ """Validates client's and credentials' universe domains are consistent.
+
+ Returns:
+ bool: True iff the configured universe domain is valid.
+
+ Raises:
+ ValueError: If the configured universe domain is not valid.
+ """
+
+ # NOTE (b/349488459): universe validation is disabled until further notice.
+ return True
+
+ def _add_cred_info_for_auth_errors(
+ self, error: core_exceptions.GoogleAPICallError
+ ) -> None:
+ """Adds credential info string to error details for 401/403/404 errors.
+
+ Args:
+ error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
+ """
+ if error.code not in [
+ HTTPStatus.UNAUTHORIZED,
+ HTTPStatus.FORBIDDEN,
+ HTTPStatus.NOT_FOUND,
+ ]:
+ return
+
+ cred = self._transport._credentials
+
+ # get_cred_info is only available in google-auth>=2.35.0
+ if not hasattr(cred, "get_cred_info"):
+ return
+
+ # ignore the type check since pypy test fails when get_cred_info
+ # is not available
+ cred_info = cred.get_cred_info() # type: ignore
+ if cred_info and hasattr(error._details, "append"):
+ error._details.append(json.dumps(cred_info))
+
+ @property
+ def api_endpoint(self):
+ """Return the API endpoint used by the client instance.
+
+ Returns:
+ str: The API endpoint used by the client instance.
+ """
+ return self._api_endpoint
+
+ @property
+ def universe_domain(self) -> str:
+ """Return the universe domain used by the client instance.
+
+ Returns:
+ str: The universe domain used by the client instance.
+ """
+ return self._universe_domain
+
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
- transport: Optional[Union[str, LoggingServiceV2Transport]] = None,
+ transport: Optional[
+ Union[
+ str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]
+ ]
+ ] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -356,25 +549,37 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, LoggingServiceV2Transport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
- client. It won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
+ transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]):
+ The transport to use, or a Callable that constructs and returns a new transport.
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the LoggingServiceV2Transport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                    variable, which can have one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
+ to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that the ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
@@ -385,17 +590,38 @@ def __init__(
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
- if isinstance(client_options, dict):
- client_options = client_options_lib.from_dict(client_options)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
- client_options = cast(client_options_lib.ClientOptions, client_options)
+ self._client_options = client_options
+ if isinstance(self._client_options, dict):
+ self._client_options = client_options_lib.from_dict(self._client_options)
+ if self._client_options is None:
+ self._client_options = client_options_lib.ClientOptions()
+ self._client_options = cast(
+ client_options_lib.ClientOptions, self._client_options
+ )
+
+ universe_domain_opt = getattr(self._client_options, "universe_domain", None)
- api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
- client_options
+ (
+ self._use_client_cert,
+ self._use_mtls_endpoint,
+ self._universe_domain_env,
+ ) = LoggingServiceV2Client._read_environment_variables()
+ self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(
+ self._client_options.client_cert_source, self._use_client_cert
)
+ self._universe_domain = LoggingServiceV2Client._get_universe_domain(
+ universe_domain_opt, self._universe_domain_env
+ )
+ self._api_endpoint = None # updated below, depending on `transport`
+
+ # Initialize the universe domain validation.
+ self._is_universe_domain_valid = False
+
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
- api_key_value = getattr(client_options, "api_key", None)
+ api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
@@ -404,20 +630,33 @@ def __init__(
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
- if isinstance(transport, LoggingServiceV2Transport):
+ transport_provided = isinstance(transport, LoggingServiceV2Transport)
+ if transport_provided:
# transport is a LoggingServiceV2Transport instance.
- if credentials or client_options.credentials_file or api_key_value:
+ if credentials or self._client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
- if client_options.scopes:
+ if self._client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
- self._transport = transport
- else:
+ self._transport = cast(LoggingServiceV2Transport, transport)
+ self._api_endpoint = self._transport.host
+
+ self._api_endpoint = (
+ self._api_endpoint
+ or LoggingServiceV2Client._get_api_endpoint(
+ self._client_options.api_endpoint,
+ self._client_cert_source,
+ self._universe_domain,
+ self._use_mtls_endpoint,
+ )
+ )
+
+ if not transport_provided:
import google.auth._default # type: ignore
if api_key_value and hasattr(
@@ -427,19 +666,50 @@ def __init__(
api_key_value
)
- Transport = type(self).get_transport_class(transport)
- self._transport = Transport(
+ transport_init: Union[
+ Type[LoggingServiceV2Transport],
+ Callable[..., LoggingServiceV2Transport],
+ ] = (
+ LoggingServiceV2Client.get_transport_class(transport)
+ if isinstance(transport, str) or transport is None
+ else cast(Callable[..., LoggingServiceV2Transport], transport)
+ )
+ # initialize with the provided callable or the passed in class
+ self._transport = transport_init(
credentials=credentials,
- credentials_file=client_options.credentials_file,
- host=api_endpoint,
- scopes=client_options.scopes,
- client_cert_source_for_mtls=client_cert_source_func,
- quota_project_id=client_options.quota_project_id,
+ credentials_file=self._client_options.credentials_file,
+ host=self._api_endpoint,
+ scopes=self._client_options.scopes,
+ client_cert_source_for_mtls=self._client_cert_source,
+ quota_project_id=self._client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
- api_audience=client_options.api_audience,
+ api_audience=self._client_options.api_audience,
)
+ if "async" not in str(self._transport):
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.logging_v2.LoggingServiceV2Client`.",
+ extra={
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "universeDomain": getattr(
+ self._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._transport, "_credentials")
+ else {
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "credentialsType": None,
+ },
+ )
+
def delete_log(
self,
request: Optional[Union[logging.DeleteLogRequest, dict]] = None,
@@ -447,7 +717,7 @@ def delete_log(
log_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes all the log entries in a log for the \_Default Log
Bucket. The log reappears if it receives new entries. Log
@@ -502,23 +772,26 @@ def sample_delete_log():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([log_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [log_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging.DeleteLogRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging.DeleteLogRequest):
request = logging.DeleteLogRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -536,6 +809,9 @@ def sample_delete_log():
gapic_v1.routing_header.to_grpc_metadata((("log_name", request.log_name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -554,7 +830,7 @@ def write_log_entries(
entries: Optional[MutableSequence[log_entry.LogEntry]] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging.WriteLogEntriesResponse:
r"""Writes log entries to Logging. This API method is the
only way to send log entries to Logging. This method is
@@ -685,27 +961,30 @@ def sample_write_log_entries():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.WriteLogEntriesResponse:
Result returned from WriteLogEntries.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([log_name, resource, labels, entries])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [log_name, resource, labels, entries]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging.WriteLogEntriesRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging.WriteLogEntriesRequest):
request = logging.WriteLogEntriesRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -723,6 +1002,9 @@ def sample_write_log_entries():
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.write_log_entries]
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -743,7 +1025,7 @@ def list_log_entries(
order_by: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLogEntriesPager:
r"""Lists log entries. Use this method to retrieve log entries that
originated from a project/folder/organization/billing account.
@@ -830,8 +1112,10 @@ def sample_list_log_entries():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager:
@@ -842,19 +1126,20 @@ def sample_list_log_entries():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([resource_names, filter, order_by])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [resource_names, filter, order_by]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging.ListLogEntriesRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging.ListLogEntriesRequest):
request = logging.ListLogEntriesRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -870,6 +1155,9 @@ def sample_list_log_entries():
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_log_entries]
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -884,6 +1172,8 @@ def sample_list_log_entries():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -898,7 +1188,7 @@ def list_monitored_resource_descriptors(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListMonitoredResourceDescriptorsPager:
r"""Lists the descriptors for monitored resource types
used by Logging.
@@ -936,8 +1226,10 @@ def sample_list_monitored_resource_descriptors():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager:
@@ -949,10 +1241,8 @@ def sample_list_monitored_resource_descriptors():
"""
# Create or coerce a protobuf request object.
- # Minor optimization to avoid making a copy if the user passes
- # in a logging.ListMonitoredResourceDescriptorsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest):
request = logging.ListMonitoredResourceDescriptorsRequest(request)
@@ -962,6 +1252,9 @@ def sample_list_monitored_resource_descriptors():
self._transport.list_monitored_resource_descriptors
]
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -976,6 +1269,8 @@ def sample_list_monitored_resource_descriptors():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -989,7 +1284,7 @@ def list_logs(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLogsPager:
r"""Lists the logs in projects, organizations, folders,
or billing accounts. Only logs that have entries are
@@ -1039,8 +1334,10 @@ def sample_list_logs():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager:
@@ -1052,19 +1349,20 @@ def sample_list_logs():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging.ListLogsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging.ListLogsRequest):
request = logging.ListLogsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1082,6 +1380,9 @@ def sample_list_logs():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -1096,6 +1397,8 @@ def sample_list_logs():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -1108,7 +1411,7 @@ def tail_log_entries(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> Iterable[logging.TailLogEntriesResponse]:
r"""Streaming read of log entries as they are ingested.
Until the stream is terminated, it will continue reading
@@ -1157,8 +1460,10 @@ def request_generator():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]:
@@ -1169,6 +1474,9 @@ def request_generator():
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.tail_log_entries]
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
requests,
@@ -1199,7 +1507,7 @@ def list_operations(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
@@ -1210,8 +1518,10 @@ def list_operations(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
@@ -1224,11 +1534,7 @@ def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1236,16 +1542,23 @@ def list_operations(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
+ # Validate the universe domain.
+ self._validate_universe_domain()
- # Done; return the response.
- return response
+ try:
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+ except core_exceptions.GoogleAPICallError as e:
+ self._add_cred_info_for_auth_errors(e)
+ raise e
def get_operation(
self,
@@ -1253,7 +1566,7 @@ def get_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -1264,8 +1577,10 @@ def get_operation(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
@@ -1278,11 +1593,7 @@ def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1290,16 +1601,23 @@ def get_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
+ # Validate the universe domain.
+ self._validate_universe_domain()
- # Done; return the response.
- return response
+ try:
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+ except core_exceptions.GoogleAPICallError as e:
+ self._add_cred_info_for_auth_errors(e)
+ raise e
def cancel_operation(
self,
@@ -1307,7 +1625,7 @@ def cancel_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -1322,8 +1640,10 @@ def cancel_operation(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
@@ -1335,11 +1655,7 @@ def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1347,6 +1663,9 @@ def cancel_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -1360,5 +1679,7 @@ def cancel_operation(
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
__all__ = ("LoggingServiceV2Client",)
diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py
index 02dcf93b3..f19ad6304 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
@@ -22,8 +25,18 @@
Tuple,
Optional,
Iterator,
+ Union,
)
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+ OptionalAsyncRetry = Union[
+ retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None
+ ]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+ OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
+
from google.api import monitored_resource_pb2 # type: ignore
from google.cloud.logging_v2.types import log_entry
from google.cloud.logging_v2.types import logging
@@ -53,7 +66,9 @@ def __init__(
request: logging.ListLogEntriesRequest,
response: logging.ListLogEntriesResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -64,12 +79,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLogEntriesResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging.ListLogEntriesRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -80,7 +102,12 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[log_entry.LogEntry]:
@@ -115,7 +142,9 @@ def __init__(
request: logging.ListLogEntriesRequest,
response: logging.ListLogEntriesResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -126,12 +155,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLogEntriesResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging.ListLogEntriesRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -142,7 +178,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]:
@@ -181,7 +222,9 @@ def __init__(
request: logging.ListMonitoredResourceDescriptorsRequest,
response: logging.ListMonitoredResourceDescriptorsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -192,12 +235,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging.ListMonitoredResourceDescriptorsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -208,7 +258,12 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]:
@@ -245,7 +300,9 @@ def __init__(
request: logging.ListMonitoredResourceDescriptorsRequest,
response: logging.ListMonitoredResourceDescriptorsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -256,12 +313,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging.ListMonitoredResourceDescriptorsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -274,7 +338,12 @@ async def pages(
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(
@@ -315,7 +384,9 @@ def __init__(
request: logging.ListLogsRequest,
response: logging.ListLogsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -326,12 +397,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLogsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging.ListLogsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -342,7 +420,12 @@ def pages(self) -> Iterator[logging.ListLogsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[str]:
@@ -377,7 +460,9 @@ def __init__(
request: logging.ListLogsRequest,
response: logging.ListLogsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -388,12 +473,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLogsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging.ListLogsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -404,7 +496,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[str]:
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst b/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst
new file mode 100644
index 000000000..897a4c7bf
--- /dev/null
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`LoggingServiceV2Transport` is the ABC for all transports.
+- public child `LoggingServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `LoggingServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseLoggingServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `LoggingServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py
index d7dae810b..48f0b711c 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py
index a256ca284..7f7cfe9a2 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
from google.cloud.logging_v2.types import logging
from google.longrunning import operations_pb2 # type: ignore
@@ -34,6 +35,9 @@
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class LoggingServiceV2Transport(abc.ABC):
"""Abstract transport class for LoggingServiceV2."""
@@ -65,7 +69,7 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -90,6 +94,8 @@ def __init__(
# Save the scopes.
self._scopes = scopes
+ if not hasattr(self, "_ignore_credentials"):
+ self._ignore_credentials: bool = False
# If no credentials are provided, then determine the appropriate
# defaults.
@@ -102,7 +108,7 @@ def __init__(
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
- elif credentials is None:
+ elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
@@ -128,6 +134,10 @@ def __init__(
host += ":443"
self._host = host
+ @property
+ def host(self):
+ return self._host
+
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -227,6 +237,21 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=3600.0,
client_info=client_info,
),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
def close(self):
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py
index 775fcbf98..7bffe25b6 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -21,14 +24,91 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.logging_v2.types import logging
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+ # Convert the gRPC trailing metadata into a dict of key/value string pairs
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport):
"""gRPC backend transport for LoggingServiceV2.
@@ -52,7 +132,7 @@ def __init__(
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
- channel: Optional[grpc.Channel] = None,
+ channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -66,20 +146,23 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
- ignored if ``channel`` is provided.
- channel (Optional[grpc.Channel]): A ``Channel`` instance through
- which to make calls.
+ ignored if a ``channel`` instance is provided.
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
@@ -89,11 +172,11 @@ def __init__(
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -119,9 +202,10 @@ def __init__(
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- if channel:
+ if isinstance(channel, grpc.Channel):
# Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
@@ -160,7 +244,9 @@ def __init__(
)
if not self._grpc_channel:
- self._grpc_channel = type(self).create_channel(
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
self._host,
# use the credentials which are saved
credentials=self._credentials,
@@ -176,7 +262,12 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientInterceptor()
+ self._logged_channel = grpc.intercept_channel(
+ self._grpc_channel, self._interceptor
+ )
+
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
@@ -252,7 +343,7 @@ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]:
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_log" not in self._stubs:
- self._stubs["delete_log"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_log"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/DeleteLog",
request_serializer=logging.DeleteLogRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -284,7 +375,7 @@ def write_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "write_log_entries" not in self._stubs:
- self._stubs["write_log_entries"] = self.grpc_channel.unary_unary(
+ self._stubs["write_log_entries"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/WriteLogEntries",
request_serializer=logging.WriteLogEntriesRequest.serialize,
response_deserializer=logging.WriteLogEntriesResponse.deserialize,
@@ -313,7 +404,7 @@ def list_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_log_entries" not in self._stubs:
- self._stubs["list_log_entries"] = self.grpc_channel.unary_unary(
+ self._stubs["list_log_entries"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListLogEntries",
request_serializer=logging.ListLogEntriesRequest.serialize,
response_deserializer=logging.ListLogEntriesResponse.deserialize,
@@ -346,7 +437,7 @@ def list_monitored_resource_descriptors(
if "list_monitored_resource_descriptors" not in self._stubs:
self._stubs[
"list_monitored_resource_descriptors"
- ] = self.grpc_channel.unary_unary(
+ ] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors",
request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize,
response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize,
@@ -374,7 +465,7 @@ def list_logs(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_logs" not in self._stubs:
- self._stubs["list_logs"] = self.grpc_channel.unary_unary(
+ self._stubs["list_logs"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListLogs",
request_serializer=logging.ListLogsRequest.serialize,
response_deserializer=logging.ListLogsResponse.deserialize,
@@ -402,7 +493,7 @@ def tail_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "tail_log_entries" not in self._stubs:
- self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream(
+ self._stubs["tail_log_entries"] = self._logged_channel.stream_stream(
"/google.logging.v2.LoggingServiceV2/TailLogEntries",
request_serializer=logging.TailLogEntriesRequest.serialize,
response_deserializer=logging.TailLogEntriesResponse.deserialize,
@@ -410,7 +501,7 @@ def tail_log_entries(
return self._stubs["tail_log_entries"]
def close(self):
- self.grpc_channel.close()
+ self._logged_channel.close()
@property
def cancel_operation(
@@ -422,7 +513,7 @@ def cancel_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
@@ -439,7 +530,7 @@ def get_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["get_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
@@ -458,7 +549,7 @@ def list_operations(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ self._stubs["list_operations"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py
index 5f1acd974..f73ac1150 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,15 +13,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
+import json
+import pickle
+import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.logging_v2.types import logging
@@ -30,6 +39,82 @@
from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO
from .grpc import LoggingServiceV2GrpcTransport
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+ grpc.aio.UnaryUnaryClientInterceptor
+): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert the gRPC trailing metadata into a dict of key/value string pairs
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra={
+ "serviceName": "google.logging.v2.LoggingServiceV2",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport):
"""gRPC AsyncIO backend transport for LoggingServiceV2.
@@ -67,7 +152,6 @@ def create_channel(
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -97,7 +181,7 @@ def __init__(
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
- channel: Optional[aio.Channel] = None,
+ channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -111,21 +195,24 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
- channel (Optional[aio.Channel]): A ``Channel`` instance through
- which to make calls.
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
@@ -135,11 +222,11 @@ def __init__(
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -165,9 +252,10 @@ def __init__(
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- if channel:
+ if isinstance(channel, aio.Channel):
# Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
@@ -205,7 +293,9 @@ def __init__(
)
if not self._grpc_channel:
- self._grpc_channel = type(self).create_channel(
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
self._host,
# use the credentials which are saved
credentials=self._credentials,
@@ -221,7 +311,13 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@property
@@ -257,7 +353,7 @@ def delete_log(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_log" not in self._stubs:
- self._stubs["delete_log"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_log"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/DeleteLog",
request_serializer=logging.DeleteLogRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -291,7 +387,7 @@ def write_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "write_log_entries" not in self._stubs:
- self._stubs["write_log_entries"] = self.grpc_channel.unary_unary(
+ self._stubs["write_log_entries"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/WriteLogEntries",
request_serializer=logging.WriteLogEntriesRequest.serialize,
response_deserializer=logging.WriteLogEntriesResponse.deserialize,
@@ -322,7 +418,7 @@ def list_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_log_entries" not in self._stubs:
- self._stubs["list_log_entries"] = self.grpc_channel.unary_unary(
+ self._stubs["list_log_entries"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListLogEntries",
request_serializer=logging.ListLogEntriesRequest.serialize,
response_deserializer=logging.ListLogEntriesResponse.deserialize,
@@ -355,7 +451,7 @@ def list_monitored_resource_descriptors(
if "list_monitored_resource_descriptors" not in self._stubs:
self._stubs[
"list_monitored_resource_descriptors"
- ] = self.grpc_channel.unary_unary(
+ ] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors",
request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize,
response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize,
@@ -383,7 +479,7 @@ def list_logs(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_logs" not in self._stubs:
- self._stubs["list_logs"] = self.grpc_channel.unary_unary(
+ self._stubs["list_logs"] = self._logged_channel.unary_unary(
"/google.logging.v2.LoggingServiceV2/ListLogs",
request_serializer=logging.ListLogsRequest.serialize,
response_deserializer=logging.ListLogsResponse.deserialize,
@@ -413,15 +509,140 @@ def tail_log_entries(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "tail_log_entries" not in self._stubs:
- self._stubs["tail_log_entries"] = self.grpc_channel.stream_stream(
+ self._stubs["tail_log_entries"] = self._logged_channel.stream_stream(
"/google.logging.v2.LoggingServiceV2/TailLogEntries",
request_serializer=logging.TailLogEntriesRequest.serialize,
response_deserializer=logging.TailLogEntriesResponse.deserialize,
)
return self._stubs["tail_log_entries"]
+ def _prep_wrapped_messages(self, client_info):
+ """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+ self._wrapped_methods = {
+ self.delete_log: self._wrap_method(
+ self.delete_log,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.write_log_entries: self._wrap_method(
+ self.write_log_entries,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.list_log_entries: self._wrap_method(
+ self.list_log_entries,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.list_monitored_resource_descriptors: self._wrap_method(
+ self.list_monitored_resource_descriptors,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.list_logs: self._wrap_method(
+ self.list_logs,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.tail_log_entries: self._wrap_method(
+ self.tail_log_entries,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=3600.0,
+ ),
+ default_timeout=3600.0,
+ client_info=client_info,
+ ),
+ self.cancel_operation: self._wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: self._wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: self._wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
- return self.grpc_channel.close()
+ return self._logged_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
@property
def cancel_operation(
@@ -433,7 +654,7 @@ def cancel_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
@@ -450,7 +671,7 @@ def get_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["get_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
@@ -469,7 +690,7 @@ def list_operations(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ self._stubs["list_operations"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py
index 3b688ccb4..41a1ef4a6 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
index a120c352b..129fc055b 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,11 +13,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import logging as std_logging
from collections import OrderedDict
-import functools
import re
from typing import (
Dict,
+ Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -33,14 +34,16 @@
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
-from google.api_core import retry as retries
+from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
+
try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+ OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
+ OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore
from google.api import distribution_pb2 # type: ignore
from google.api import metric_pb2 # type: ignore
@@ -52,14 +55,27 @@
from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport
from .client import MetricsServiceV2Client
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
class MetricsServiceV2AsyncClient:
"""Service for configuring logs-based metrics."""
_client: MetricsServiceV2Client
+ # Copy defaults from the synchronous client for use here.
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ _DEFAULT_ENDPOINT_TEMPLATE = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE
+ _DEFAULT_UNIVERSE = MetricsServiceV2Client._DEFAULT_UNIVERSE
log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path)
parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path)
@@ -166,19 +182,40 @@ def transport(self) -> MetricsServiceV2Transport:
"""
return self._client.transport
- get_transport_class = functools.partial(
- type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)
- )
+ @property
+ def api_endpoint(self):
+ """Return the API endpoint used by the client instance.
+
+ Returns:
+ str: The API endpoint used by the client instance.
+ """
+ return self._client._api_endpoint
+
+ @property
+ def universe_domain(self) -> str:
+ """Return the universe domain used by the client instance.
+
+ Returns:
+ str: The universe domain used
+ by the client instance.
+ """
+ return self._client._universe_domain
+
+ get_transport_class = MetricsServiceV2Client.get_transport_class
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
- transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio",
+ transport: Optional[
+ Union[
+ str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]
+ ]
+ ] = "grpc_asyncio",
client_options: Optional[ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
- """Instantiates the metrics service v2 client.
+ """Instantiates the metrics service v2 async client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
@@ -186,26 +223,43 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.MetricsServiceV2Transport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
+ transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]):
+ The transport to use, or a Callable that constructs and returns a new transport to use.
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the MetricsServiceV2Transport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which can have one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
+ to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
@@ -217,6 +271,28 @@ def __init__(
client_info=client_info,
)
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.",
+ extra={
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "universeDomain": getattr(
+ self._client._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._client._transport, "_credentials")
+ else {
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "credentialsType": None,
+ },
+ )
+
async def list_log_metrics(
self,
request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None,
@@ -224,7 +300,7 @@ async def list_log_metrics(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLogMetricsAsyncPager:
r"""Lists logs-based metrics.
@@ -269,11 +345,13 @@ async def sample_list_log_metrics():
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager:
@@ -285,16 +363,22 @@ async def sample_list_log_metrics():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_metrics.ListLogMetricsRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_metrics.ListLogMetricsRequest):
+ request = logging_metrics.ListLogMetricsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -303,22 +387,9 @@ async def sample_list_log_metrics():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_log_metrics,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.list_log_metrics
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -326,6 +397,9 @@ async def sample_list_log_metrics():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -340,6 +414,8 @@ async def sample_list_log_metrics():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -353,7 +429,7 @@ async def get_log_metric(
metric_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_metrics.LogMetric:
r"""Gets a logs-based metric.
@@ -396,11 +472,13 @@ async def sample_get_log_metric():
This corresponds to the ``metric_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogMetric:
@@ -419,16 +497,22 @@ async def sample_get_log_metric():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([metric_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [metric_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_metrics.GetLogMetricRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_metrics.GetLogMetricRequest):
+ request = logging_metrics.GetLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -437,22 +521,9 @@ async def sample_get_log_metric():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_log_metric,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.get_log_metric
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -462,6 +533,9 @@ async def sample_get_log_metric():
),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -481,7 +555,7 @@ async def create_log_metric(
metric: Optional[logging_metrics.LogMetric] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_metrics.LogMetric:
r"""Creates a logs-based metric.
@@ -540,11 +614,13 @@ async def sample_create_log_metric():
This corresponds to the ``metric`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogMetric:
@@ -563,16 +639,22 @@ async def sample_create_log_metric():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, metric])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, metric]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_metrics.CreateLogMetricRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_metrics.CreateLogMetricRequest):
+ request = logging_metrics.CreateLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -583,11 +665,9 @@ async def sample_create_log_metric():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_log_metric,
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.create_log_metric
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -595,6 +675,9 @@ async def sample_create_log_metric():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -614,7 +697,7 @@ async def update_log_metric(
metric: Optional[logging_metrics.LogMetric] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_metrics.LogMetric:
r"""Creates or updates a logs-based metric.
@@ -672,11 +755,13 @@ async def sample_update_log_metric():
This corresponds to the ``metric`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogMetric:
@@ -695,16 +780,22 @@ async def sample_update_log_metric():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([metric_name, metric])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [metric_name, metric]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_metrics.UpdateLogMetricRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_metrics.UpdateLogMetricRequest):
+ request = logging_metrics.UpdateLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -715,22 +806,9 @@ async def sample_update_log_metric():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.update_log_metric,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.update_log_metric
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -740,6 +818,9 @@ async def sample_update_log_metric():
),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -758,7 +839,7 @@ async def delete_log_metric(
metric_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a logs-based metric.
@@ -798,23 +879,31 @@ async def sample_delete_log_metric():
This corresponds to the ``metric_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([metric_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [metric_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = logging_metrics.DeleteLogMetricRequest(request)
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
+ if not isinstance(request, logging_metrics.DeleteLogMetricRequest):
+ request = logging_metrics.DeleteLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
@@ -823,22 +912,9 @@ async def sample_delete_log_metric():
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.delete_log_metric,
- default_retry=retries.Retry(
- initial=0.1,
- maximum=60.0,
- multiplier=1.3,
- predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.InternalServerError,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=60.0,
- ),
- default_timeout=60.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._client._transport._wrapped_methods[
+ self._client._transport.delete_log_metric
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -848,6 +924,9 @@ async def sample_delete_log_metric():
),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -862,7 +941,7 @@ async def list_operations(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
@@ -870,11 +949,13 @@ async def list_operations(
request (:class:`~.operations_pb2.ListOperationsRequest`):
The request object. Request message for
`ListOperations` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
@@ -887,11 +968,7 @@ async def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -899,6 +976,9 @@ async def list_operations(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -916,7 +996,7 @@ async def get_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -924,11 +1004,13 @@ async def get_operation(
request (:class:`~.operations_pb2.GetOperationRequest`):
The request object. Request message for
`GetOperation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
@@ -941,11 +1023,7 @@ async def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -953,6 +1031,9 @@ async def get_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
response = await rpc(
request,
@@ -970,7 +1051,7 @@ async def cancel_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -982,11 +1063,13 @@ async def cancel_operation(
request (:class:`~.operations_pb2.CancelOperationRequest`):
The request object. Request message for
`CancelOperation` method.
- retry (google.api_core.retry.Retry): Designation of what errors,
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
@@ -998,11 +1081,7 @@ async def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._client._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1010,6 +1089,9 @@ async def cancel_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._client._validate_universe_domain()
+
# Send the request.
await rpc(
request,
@@ -1029,5 +1111,8 @@ async def __aexit__(self, exc_type, exc, tb):
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
__all__ = ("MetricsServiceV2AsyncClient",)
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py
index 098014bcd..f2f0f8ce1 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/client.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,10 +14,14 @@
# limitations under the License.
#
from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
import os
import re
from typing import (
Dict,
+ Callable,
Mapping,
MutableMapping,
MutableSequence,
@@ -28,6 +32,7 @@
Union,
cast,
)
+import warnings
from google.cloud.logging_v2 import gapic_version as package_version
@@ -40,11 +45,21 @@
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
try:
- OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
except AttributeError: # pragma: NO COVER
- OptionalRetry = Union[retries.Retry, object] # type: ignore
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
from google.api import distribution_pb2 # type: ignore
from google.api import metric_pb2 # type: ignore
@@ -126,11 +141,15 @@ def _get_default_mtls_endpoint(api_endpoint):
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+ # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
DEFAULT_ENDPOINT = "logging.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
+ _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}"
+ _DEFAULT_UNIVERSE = "googleapis.com"
+
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -276,7 +295,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
- """Return the API endpoint and client cert source for mutual TLS.
+ """Deprecated. Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
@@ -306,6 +325,11 @@ def get_mtls_endpoint_and_cert_source(
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
+
+ warnings.warn(
+ "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+ DeprecationWarning,
+ )
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
@@ -339,11 +363,180 @@ def get_mtls_endpoint_and_cert_source(
return api_endpoint, client_cert_source
+ @staticmethod
+ def _read_environment_variables():
+ """Returns the environment variables used by the client.
+
+ Returns:
+ Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+ GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+ Raises:
+ ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+ any of ["true", "false"].
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+ is not any of ["auto", "never", "always"].
+ """
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+ universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ if use_mtls_endpoint not in ("auto", "never", "always"):
+ raise MutualTLSChannelError(
+ "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+ return use_client_cert == "true", use_mtls_endpoint, universe_domain_env
+
+ @staticmethod
+ def _get_client_cert_source(provided_cert_source, use_cert_flag):
+ """Return the client cert source to be used by the client.
+
+ Args:
+ provided_cert_source (bytes): The client certificate source provided.
+ use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+ Returns:
+ bytes or None: The client cert source to be used by the client.
+ """
+ client_cert_source = None
+ if use_cert_flag:
+ if provided_cert_source:
+ client_cert_source = provided_cert_source
+ elif mtls.has_default_client_cert_source():
+ client_cert_source = mtls.default_client_cert_source()
+ return client_cert_source
+
+ @staticmethod
+ def _get_api_endpoint(
+ api_override, client_cert_source, universe_domain, use_mtls_endpoint
+ ):
+ """Return the API endpoint used by the client.
+
+ Args:
+ api_override (str): The API endpoint override. If specified, this is always
+ the return value of this function and the other arguments are not used.
+ client_cert_source (bytes): The client certificate source used by the client.
+ universe_domain (str): The universe domain used by the client.
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters.
+ Possible values are "always", "auto", or "never".
+
+ Returns:
+ str: The API endpoint to be used by the client.
+ """
+ if api_override is not None:
+ api_endpoint = api_override
+ elif use_mtls_endpoint == "always" or (
+ use_mtls_endpoint == "auto" and client_cert_source
+ ):
+ _default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE
+ if universe_domain != _default_universe:
+ raise MutualTLSChannelError(
+ f"mTLS is not supported in any universe other than {_default_universe}."
+ )
+ api_endpoint = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=universe_domain
+ )
+ return api_endpoint
+
+ @staticmethod
+ def _get_universe_domain(
+ client_universe_domain: Optional[str], universe_domain_env: Optional[str]
+ ) -> str:
+ """Return the universe domain used by the client.
+
+ Args:
+ client_universe_domain (Optional[str]): The universe domain configured via the client options.
+ universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.
+
+ Returns:
+ str: The universe domain to be used by the client.
+
+ Raises:
+ ValueError: If the universe domain is an empty string.
+ """
+ universe_domain = MetricsServiceV2Client._DEFAULT_UNIVERSE
+ if client_universe_domain is not None:
+ universe_domain = client_universe_domain
+ elif universe_domain_env is not None:
+ universe_domain = universe_domain_env
+ if len(universe_domain.strip()) == 0:
+ raise ValueError("Universe Domain cannot be an empty string.")
+ return universe_domain
+
+ def _validate_universe_domain(self):
+ """Validates client's and credentials' universe domains are consistent.
+
+ Returns:
+ bool: True iff the configured universe domain is valid.
+
+ Raises:
+ ValueError: If the configured universe domain is not valid.
+ """
+
+ # NOTE (b/349488459): universe validation is disabled until further notice.
+ return True
+
+ def _add_cred_info_for_auth_errors(
+ self, error: core_exceptions.GoogleAPICallError
+ ) -> None:
+ """Adds credential info string to error details for 401/403/404 errors.
+
+ Args:
+ error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info.
+ """
+ if error.code not in [
+ HTTPStatus.UNAUTHORIZED,
+ HTTPStatus.FORBIDDEN,
+ HTTPStatus.NOT_FOUND,
+ ]:
+ return
+
+ cred = self._transport._credentials
+
+ # get_cred_info is only available in google-auth>=2.35.0
+ if not hasattr(cred, "get_cred_info"):
+ return
+
+ # ignore the type check since pypy test fails when get_cred_info
+ # is not available
+ cred_info = cred.get_cred_info() # type: ignore
+ if cred_info and hasattr(error._details, "append"):
+ error._details.append(json.dumps(cred_info))
+
+ @property
+ def api_endpoint(self):
+ """Return the API endpoint used by the client instance.
+
+ Returns:
+ str: The API endpoint used by the client instance.
+ """
+ return self._api_endpoint
+
+ @property
+ def universe_domain(self) -> str:
+ """Return the universe domain used by the client instance.
+
+ Returns:
+ str: The universe domain used by the client instance.
+ """
+ return self._universe_domain
+
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
- transport: Optional[Union[str, MetricsServiceV2Transport]] = None,
+ transport: Optional[
+ Union[
+ str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]
+ ]
+ ] = None,
client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -355,25 +548,37 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, MetricsServiceV2Transport]): The
- transport to use. If set to None, a transport is chosen
- automatically.
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the
- client. It won't take effect if a ``transport`` instance is provided.
- (1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
- environment variable can also be used to override the endpoint:
+ transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]):
+ The transport to use, or a Callable that constructs and returns a new transport.
+ If a Callable is given, it will be called with the same set of initialization
+ arguments as used in the MetricsServiceV2Transport constructor.
+ If set to None, a transport is chosen automatically.
+ client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+ Custom options for the client.
+
+ 1. The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client when ``transport`` is
+ not explicitly provided. Only if this property is not set and
+ ``transport`` was not explicitly provided, the endpoint is
+ determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+ variable, which have one of the following values:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint) and "auto" (auto switch to the
- default mTLS endpoint if client certificate is present, this is
- the default value). However, the ``api_endpoint`` property takes
- precedence if provided.
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ use the default regular endpoint) and "auto" (auto-switch to the
+ default mTLS endpoint if client certificate is present; this is
+ the default value).
+
+ 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
- to provide client certificate for mutual TLS transport. If
+ to provide a client certificate for mTLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
+
+ 3. The ``universe_domain`` property can be used to override the
+ default "googleapis.com" universe. Note that the ``api_endpoint``
+ property still takes precedence; and ``universe_domain`` is
+ currently not supported for mTLS.
+
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
@@ -384,17 +589,38 @@ def __init__(
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
- if isinstance(client_options, dict):
- client_options = client_options_lib.from_dict(client_options)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
- client_options = cast(client_options_lib.ClientOptions, client_options)
+ self._client_options = client_options
+ if isinstance(self._client_options, dict):
+ self._client_options = client_options_lib.from_dict(self._client_options)
+ if self._client_options is None:
+ self._client_options = client_options_lib.ClientOptions()
+ self._client_options = cast(
+ client_options_lib.ClientOptions, self._client_options
+ )
+
+ universe_domain_opt = getattr(self._client_options, "universe_domain", None)
- api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
- client_options
+ (
+ self._use_client_cert,
+ self._use_mtls_endpoint,
+ self._universe_domain_env,
+ ) = MetricsServiceV2Client._read_environment_variables()
+ self._client_cert_source = MetricsServiceV2Client._get_client_cert_source(
+ self._client_options.client_cert_source, self._use_client_cert
)
+ self._universe_domain = MetricsServiceV2Client._get_universe_domain(
+ universe_domain_opt, self._universe_domain_env
+ )
+ self._api_endpoint = None # updated below, depending on `transport`
+
+ # Initialize the universe domain validation.
+ self._is_universe_domain_valid = False
- api_key_value = getattr(client_options, "api_key", None)
+ if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER
+ # Setup logging.
+ client_logging.initialize_logging()
+
+ api_key_value = getattr(self._client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
@@ -403,20 +629,33 @@ def __init__(
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
- if isinstance(transport, MetricsServiceV2Transport):
+ transport_provided = isinstance(transport, MetricsServiceV2Transport)
+ if transport_provided:
# transport is a MetricsServiceV2Transport instance.
- if credentials or client_options.credentials_file or api_key_value:
+ if credentials or self._client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
- if client_options.scopes:
+ if self._client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
- self._transport = transport
- else:
+ self._transport = cast(MetricsServiceV2Transport, transport)
+ self._api_endpoint = self._transport.host
+
+ self._api_endpoint = (
+ self._api_endpoint
+ or MetricsServiceV2Client._get_api_endpoint(
+ self._client_options.api_endpoint,
+ self._client_cert_source,
+ self._universe_domain,
+ self._use_mtls_endpoint,
+ )
+ )
+
+ if not transport_provided:
import google.auth._default # type: ignore
if api_key_value and hasattr(
@@ -426,19 +665,50 @@ def __init__(
api_key_value
)
- Transport = type(self).get_transport_class(transport)
- self._transport = Transport(
+ transport_init: Union[
+ Type[MetricsServiceV2Transport],
+ Callable[..., MetricsServiceV2Transport],
+ ] = (
+ MetricsServiceV2Client.get_transport_class(transport)
+ if isinstance(transport, str) or transport is None
+ else cast(Callable[..., MetricsServiceV2Transport], transport)
+ )
+ # initialize with the provided callable or the passed in class
+ self._transport = transport_init(
credentials=credentials,
- credentials_file=client_options.credentials_file,
- host=api_endpoint,
- scopes=client_options.scopes,
- client_cert_source_for_mtls=client_cert_source_func,
- quota_project_id=client_options.quota_project_id,
+ credentials_file=self._client_options.credentials_file,
+ host=self._api_endpoint,
+ scopes=self._client_options.scopes,
+ client_cert_source_for_mtls=self._client_cert_source,
+ quota_project_id=self._client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
- api_audience=client_options.api_audience,
+ api_audience=self._client_options.api_audience,
)
+ if "async" not in str(self._transport):
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ ): # pragma: NO COVER
+ _LOGGER.debug(
+ "Created client `google.logging_v2.MetricsServiceV2Client`.",
+ extra={
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "universeDomain": getattr(
+ self._transport._credentials, "universe_domain", ""
+ ),
+ "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
+ "credentialsInfo": getattr(
+ self.transport._credentials, "get_cred_info", lambda: None
+ )(),
+ }
+ if hasattr(self._transport, "_credentials")
+ else {
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "credentialsType": None,
+ },
+ )
+
def list_log_metrics(
self,
request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None,
@@ -446,7 +716,7 @@ def list_log_metrics(
parent: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> pagers.ListLogMetricsPager:
r"""Lists logs-based metrics.
@@ -494,8 +764,10 @@ def sample_list_log_metrics():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager:
@@ -507,19 +779,20 @@ def sample_list_log_metrics():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_metrics.ListLogMetricsRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_metrics.ListLogMetricsRequest):
request = logging_metrics.ListLogMetricsRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -537,6 +810,9 @@ def sample_list_log_metrics():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -551,6 +827,8 @@ def sample_list_log_metrics():
method=rpc,
request=request,
response=response,
+ retry=retry,
+ timeout=timeout,
metadata=metadata,
)
@@ -564,7 +842,7 @@ def get_log_metric(
metric_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_metrics.LogMetric:
r"""Gets a logs-based metric.
@@ -610,8 +888,10 @@ def sample_get_log_metric():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogMetric:
@@ -630,19 +910,20 @@ def sample_get_log_metric():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([metric_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [metric_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_metrics.GetLogMetricRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_metrics.GetLogMetricRequest):
request = logging_metrics.GetLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -662,6 +943,9 @@ def sample_get_log_metric():
),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -681,7 +965,7 @@ def create_log_metric(
metric: Optional[logging_metrics.LogMetric] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_metrics.LogMetric:
r"""Creates a logs-based metric.
@@ -743,8 +1027,10 @@ def sample_create_log_metric():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogMetric:
@@ -763,19 +1049,20 @@ def sample_create_log_metric():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, metric])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [parent, metric]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_metrics.CreateLogMetricRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_metrics.CreateLogMetricRequest):
request = logging_metrics.CreateLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -795,6 +1082,9 @@ def sample_create_log_metric():
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -814,7 +1104,7 @@ def update_log_metric(
metric: Optional[logging_metrics.LogMetric] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> logging_metrics.LogMetric:
r"""Creates or updates a logs-based metric.
@@ -875,8 +1165,10 @@ def sample_update_log_metric():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
google.cloud.logging_v2.types.LogMetric:
@@ -895,19 +1187,20 @@ def sample_update_log_metric():
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([metric_name, metric])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [metric_name, metric]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_metrics.UpdateLogMetricRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_metrics.UpdateLogMetricRequest):
request = logging_metrics.UpdateLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -929,6 +1222,9 @@ def sample_update_log_metric():
),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
response = rpc(
request,
@@ -947,7 +1243,7 @@ def delete_log_metric(
metric_name: Optional[str] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Deletes a logs-based metric.
@@ -990,23 +1286,26 @@ def sample_delete_log_metric():
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
# Create or coerce a protobuf request object.
- # Quick check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([metric_name])
+ # - Quick check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ flattened_params = [metric_name]
+ has_flattened_params = (
+ len([param for param in flattened_params if param is not None]) > 0
+ )
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- # Minor optimization to avoid making a copy if the user passes
- # in a logging_metrics.DeleteLogMetricRequest.
- # There's no risk of modifying the input as we've already verified
- # there are no flattened fields.
+ # - Use the request object if provided (there's no risk of modifying the input as
+ # there are no flattened fields), or create one.
if not isinstance(request, logging_metrics.DeleteLogMetricRequest):
request = logging_metrics.DeleteLogMetricRequest(request)
# If we have keyword arguments corresponding to fields on the
@@ -1026,6 +1325,9 @@ def sample_delete_log_metric():
),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -1053,7 +1355,7 @@ def list_operations(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.ListOperationsResponse:
r"""Lists operations that match the specified filter in the request.
@@ -1064,8 +1366,10 @@ def list_operations(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.ListOperationsResponse:
Response message for ``ListOperations`` method.
@@ -1078,11 +1382,7 @@ def list_operations(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_operations,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1090,16 +1390,23 @@ def list_operations(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
+ # Validate the universe domain.
+ self._validate_universe_domain()
- # Done; return the response.
- return response
+ try:
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+ except core_exceptions.GoogleAPICallError as e:
+ self._add_cred_info_for_auth_errors(e)
+ raise e
def get_operation(
self,
@@ -1107,7 +1414,7 @@ def get_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> operations_pb2.Operation:
r"""Gets the latest state of a long-running operation.
@@ -1118,8 +1425,10 @@ def get_operation(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
~.operations_pb2.Operation:
An ``Operation`` object.
@@ -1132,11 +1441,7 @@ def get_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1144,16 +1449,23 @@ def get_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
- # Send the request.
- response = rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
+ # Validate the universe domain.
+ self._validate_universe_domain()
- # Done; return the response.
- return response
+ try:
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+ except core_exceptions.GoogleAPICallError as e:
+ self._add_cred_info_for_auth_errors(e)
+ raise e
def cancel_operation(
self,
@@ -1161,7 +1473,7 @@ def cancel_operation(
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Union[float, object] = gapic_v1.method.DEFAULT,
- metadata: Sequence[Tuple[str, str]] = (),
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
) -> None:
r"""Starts asynchronous cancellation on a long-running operation.
@@ -1176,8 +1488,10 @@ def cancel_operation(
retry (google.api_core.retry.Retry): Designation of what errors,
if any, should be retried.
timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
Returns:
None
"""
@@ -1189,11 +1503,7 @@ def cancel_operation(
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.cancel_operation,
- default_timeout=None,
- client_info=DEFAULT_CLIENT_INFO,
- )
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1201,6 +1511,9 @@ def cancel_operation(
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
+ # Validate the universe domain.
+ self._validate_universe_domain()
+
# Send the request.
rpc(
request,
@@ -1214,5 +1527,7 @@ def cancel_operation(
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
__all__ = ("MetricsServiceV2Client",)
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py
index dd23001cc..75fc998a2 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.api_core import retry_async as retries_async
from typing import (
Any,
AsyncIterator,
@@ -22,8 +25,18 @@
Tuple,
Optional,
Iterator,
+ Union,
)
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+ OptionalAsyncRetry = Union[
+ retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None
+ ]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object, None] # type: ignore
+ OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore
+
from google.cloud.logging_v2.types import logging_metrics
@@ -51,7 +64,9 @@ def __init__(
request: logging_metrics.ListLogMetricsRequest,
response: logging_metrics.ListLogMetricsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiate the pager.
@@ -62,12 +77,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLogMetricsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_metrics.ListLogMetricsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -78,7 +100,12 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request, metadata=self._metadata)
+ self._response = self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __iter__(self) -> Iterator[logging_metrics.LogMetric]:
@@ -113,7 +140,9 @@ def __init__(
request: logging_metrics.ListLogMetricsRequest,
response: logging_metrics.ListLogMetricsResponse,
*,
- metadata: Sequence[Tuple[str, str]] = ()
+ retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()
):
"""Instantiates the pager.
@@ -124,12 +153,19 @@ def __init__(
The initial request object.
response (google.cloud.logging_v2.types.ListLogMetricsResponse):
The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
+ retry (google.api_core.retry.AsyncRetry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
"""
self._method = method
self._request = logging_metrics.ListLogMetricsRequest(request)
self._response = response
+ self._retry = retry
+ self._timeout = timeout
self._metadata = metadata
def __getattr__(self, name: str) -> Any:
@@ -140,7 +176,12 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = await self._method(self._request, metadata=self._metadata)
+ self._response = await self._method(
+ self._request,
+ retry=self._retry,
+ timeout=self._timeout,
+ metadata=self._metadata,
+ )
yield self._response
def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]:
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst b/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst
new file mode 100644
index 000000000..00dffa25f
--- /dev/null
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst
@@ -0,0 +1,9 @@
+
+transport inheritance structure
+_______________________________
+
+`MetricsServiceV2Transport` is the ABC for all transports.
+- public child `MetricsServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`).
+- public child `MetricsServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
+- private child `_BaseMetricsServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
+- public child `MetricsServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
index 57d82514d..4975feb99 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
index f8c4b954f..22bc19736 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -25,6 +25,7 @@
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+import google.protobuf
from google.cloud.logging_v2.types import logging_metrics
from google.longrunning import operations_pb2 # type: ignore
@@ -34,6 +35,9 @@
gapic_version=package_version.__version__
)
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER
+ DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
class MetricsServiceV2Transport(abc.ABC):
"""Abstract transport class for MetricsServiceV2."""
@@ -65,7 +69,7 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
@@ -90,6 +94,8 @@ def __init__(
# Save the scopes.
self._scopes = scopes
+ if not hasattr(self, "_ignore_credentials"):
+ self._ignore_credentials: bool = False
# If no credentials are provided, then determine the appropriate
# defaults.
@@ -102,7 +108,7 @@ def __init__(
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
- elif credentials is None:
+ elif credentials is None and not self._ignore_credentials:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
@@ -128,6 +134,10 @@ def __init__(
host += ":443"
self._host = host
+ @property
+ def host(self):
+ return self._host
+
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -200,6 +210,21 @@ def _prep_wrapped_messages(self, client_info):
default_timeout=60.0,
client_info=client_info,
),
+ self.cancel_operation: gapic_v1.method.wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: gapic_v1.method.wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: gapic_v1.method.wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
}
def close(self):
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
index 9426a670c..fe0943a94 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import json
+import logging as std_logging
+import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
@@ -21,14 +24,91 @@
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from google.cloud.logging_v2.types import logging_metrics
from google.longrunning import operations_pb2 # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER
+ def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = response.trailing_metadata()
+            # Convert gRPC trailing metadata to a dict of stringified key/value pairs.
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = response.result()
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response for {client_call_details.method}.",
+ extra={
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "rpcName": client_call_details.method,
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport):
"""gRPC backend transport for MetricsServiceV2.
@@ -52,7 +132,7 @@ def __init__(
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
- channel: Optional[grpc.Channel] = None,
+ channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -66,20 +146,23 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
- ignored if ``channel`` is provided.
- channel (Optional[grpc.Channel]): A ``Channel`` instance through
- which to make calls.
+ ignored if a ``channel`` instance is provided.
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
@@ -89,11 +172,11 @@ def __init__(
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -119,9 +202,10 @@ def __init__(
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- if channel:
+ if isinstance(channel, grpc.Channel):
# Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
@@ -160,7 +244,9 @@ def __init__(
)
if not self._grpc_channel:
- self._grpc_channel = type(self).create_channel(
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
self._host,
# use the credentials which are saved
credentials=self._credentials,
@@ -176,7 +262,12 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientInterceptor()
+ self._logged_channel = grpc.intercept_channel(
+ self._grpc_channel, self._interceptor
+ )
+
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
@@ -252,7 +343,7 @@ def list_log_metrics(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_log_metrics" not in self._stubs:
- self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary(
+ self._stubs["list_log_metrics"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/ListLogMetrics",
request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
@@ -278,7 +369,7 @@ def get_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_log_metric" not in self._stubs:
- self._stubs["get_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["get_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/GetLogMetric",
request_serializer=logging_metrics.GetLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
@@ -304,7 +395,7 @@ def create_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_log_metric" not in self._stubs:
- self._stubs["create_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["create_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/CreateLogMetric",
request_serializer=logging_metrics.CreateLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
@@ -330,7 +421,7 @@ def update_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_log_metric" not in self._stubs:
- self._stubs["update_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["update_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/UpdateLogMetric",
request_serializer=logging_metrics.UpdateLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
@@ -356,7 +447,7 @@ def delete_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_log_metric" not in self._stubs:
- self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/DeleteLogMetric",
request_serializer=logging_metrics.DeleteLogMetricRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
@@ -364,7 +455,7 @@ def delete_log_metric(
return self._stubs["delete_log_metric"]
def close(self):
- self.grpc_channel.close()
+ self._logged_channel.close()
@property
def cancel_operation(
@@ -376,7 +467,7 @@ def cancel_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
@@ -393,7 +484,7 @@ def get_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["get_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
@@ -412,7 +503,7 @@ def list_operations(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ self._stubs["list_operations"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
index 1756f9a1d..01aa05771 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,15 +13,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+import inspect
+import json
+import pickle
+import logging as std_logging
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
import grpc # type: ignore
+import proto # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.logging_v2.types import logging_metrics
@@ -30,6 +39,82 @@
from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
from .grpc import MetricsServiceV2GrpcTransport
+try:
+ from google.api_core import client_logging # type: ignore
+
+ CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER
+except ImportError: # pragma: NO COVER
+ CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+ grpc.aio.UnaryUnaryClientInterceptor
+): # pragma: NO COVER
+ async def intercept_unary_unary(self, continuation, client_call_details, request):
+ logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ std_logging.DEBUG
+ )
+ if logging_enabled: # pragma: NO COVER
+ request_metadata = client_call_details.metadata
+ if isinstance(request, proto.Message):
+ request_payload = type(request).to_json(request)
+ elif isinstance(request, google.protobuf.message.Message):
+ request_payload = MessageToJson(request)
+ else:
+ request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+ request_metadata = {
+ key: value.decode("utf-8") if isinstance(value, bytes) else value
+ for key, value in request_metadata
+ }
+ grpc_request = {
+ "payload": request_payload,
+ "requestMethod": "grpc",
+ "metadata": dict(request_metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for {client_call_details.method}",
+ extra={
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "rpcName": str(client_call_details.method),
+ "request": grpc_request,
+ "metadata": grpc_request["metadata"],
+ },
+ )
+ response = await continuation(client_call_details, request)
+ if logging_enabled: # pragma: NO COVER
+ response_metadata = await response.trailing_metadata()
+ # Convert gRPC metadata to a list of tuples
+ metadata = (
+ dict([(k, str(v)) for k, v in response_metadata])
+ if response_metadata
+ else None
+ )
+ result = await response
+ if isinstance(result, proto.Message):
+ response_payload = type(result).to_json(result)
+ elif isinstance(result, google.protobuf.message.Message):
+ response_payload = MessageToJson(result)
+ else:
+ response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+ grpc_response = {
+ "payload": response_payload,
+ "metadata": metadata,
+ "status": "OK",
+ }
+ _LOGGER.debug(
+ f"Received response to rpc {client_call_details.method}.",
+ extra={
+ "serviceName": "google.logging.v2.MetricsServiceV2",
+ "rpcName": str(client_call_details.method),
+ "response": grpc_response,
+ "metadata": grpc_response["metadata"],
+ },
+ )
+ return response
+
class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport):
"""gRPC AsyncIO backend transport for MetricsServiceV2.
@@ -67,7 +152,6 @@ def create_channel(
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -97,7 +181,7 @@ def __init__(
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
- channel: Optional[aio.Channel] = None,
+ channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
@@ -111,21 +195,24 @@ def __init__(
Args:
host (Optional[str]):
- The hostname to connect to.
+ The hostname to connect to (default: 'logging.googleapis.com').
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if a ``channel`` instance is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
- channel (Optional[aio.Channel]): A ``Channel`` instance through
- which to make calls.
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+ A ``Channel`` instance through which to make calls, or a Callable
+ that constructs and returns one. If set to None, ``self.create_channel``
+ is used to create the channel. If a Callable is given, it will be called
+ with the same arguments as used in ``self.create_channel``.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
@@ -135,11 +222,11 @@ def __init__(
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for the grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if a ``channel`` instance is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
- ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -165,9 +252,10 @@ def __init__(
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- if channel:
+ if isinstance(channel, aio.Channel):
# Ignore credentials if a channel was passed.
- credentials = False
+ credentials = None
+ self._ignore_credentials = True
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
@@ -205,7 +293,9 @@ def __init__(
)
if not self._grpc_channel:
- self._grpc_channel = type(self).create_channel(
+ # initialize with the provided callable or the default channel
+ channel_init = channel or type(self).create_channel
+ self._grpc_channel = channel_init(
self._host,
# use the credentials which are saved
credentials=self._credentials,
@@ -221,7 +311,13 @@ def __init__(
],
)
- # Wrap messages. This must be done after self._grpc_channel exists
+ self._interceptor = _LoggingClientAIOInterceptor()
+ self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+ self._logged_channel = self._grpc_channel
+ self._wrap_with_kind = (
+ "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+ )
+ # Wrap messages. This must be done after self._logged_channel exists
self._prep_wrapped_messages(client_info)
@property
@@ -256,7 +352,7 @@ def list_log_metrics(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_log_metrics" not in self._stubs:
- self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary(
+ self._stubs["list_log_metrics"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/ListLogMetrics",
request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
@@ -284,7 +380,7 @@ def get_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_log_metric" not in self._stubs:
- self._stubs["get_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["get_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/GetLogMetric",
request_serializer=logging_metrics.GetLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
@@ -312,7 +408,7 @@ def create_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_log_metric" not in self._stubs:
- self._stubs["create_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["create_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/CreateLogMetric",
request_serializer=logging_metrics.CreateLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
@@ -340,7 +436,7 @@ def update_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_log_metric" not in self._stubs:
- self._stubs["update_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["update_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/UpdateLogMetric",
request_serializer=logging_metrics.UpdateLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
@@ -366,15 +462,113 @@ def delete_log_metric(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_log_metric" not in self._stubs:
- self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary(
+ self._stubs["delete_log_metric"] = self._logged_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/DeleteLogMetric",
request_serializer=logging_metrics.DeleteLogMetricRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_log_metric"]
+ def _prep_wrapped_messages(self, client_info):
+ """Precompute the wrapped methods, overriding the base class method to use async wrappers."""
+ self._wrapped_methods = {
+ self.list_log_metrics: self._wrap_method(
+ self.list_log_metrics,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.get_log_metric: self._wrap_method(
+ self.get_log_metric,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.create_log_metric: self._wrap_method(
+ self.create_log_metric,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.update_log_metric: self._wrap_method(
+ self.update_log_metric,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.delete_log_metric: self._wrap_method(
+ self.delete_log_metric,
+ default_retry=retries.AsyncRetry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.cancel_operation: self._wrap_method(
+ self.cancel_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_operation: self._wrap_method(
+ self.get_operation,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.list_operations: self._wrap_method(
+ self.list_operations,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def _wrap_method(self, func, *args, **kwargs):
+ if self._wrap_with_kind: # pragma: NO COVER
+ kwargs["kind"] = self.kind
+ return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
def close(self):
- return self.grpc_channel.close()
+ return self._logged_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc_asyncio"
@property
def cancel_operation(
@@ -386,7 +580,7 @@ def cancel_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "cancel_operation" not in self._stubs:
- self._stubs["cancel_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["cancel_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/CancelOperation",
request_serializer=operations_pb2.CancelOperationRequest.SerializeToString,
response_deserializer=None,
@@ -403,7 +597,7 @@ def get_operation(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_operation" not in self._stubs:
- self._stubs["get_operation"] = self.grpc_channel.unary_unary(
+ self._stubs["get_operation"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/GetOperation",
request_serializer=operations_pb2.GetOperationRequest.SerializeToString,
response_deserializer=operations_pb2.Operation.FromString,
@@ -422,7 +616,7 @@ def list_operations(
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_operations" not in self._stubs:
- self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+ self._stubs["list_operations"] = self._logged_channel.unary_unary(
"/google.longrunning.Operations/ListOperations",
request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
response_deserializer=operations_pb2.ListOperationsResponse.FromString,
diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py
index 38dec7cdf..efea79307 100644
--- a/google/cloud/logging_v2/types/__init__.py
+++ b/google/cloud/logging_v2/types/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py
index 98f768fb2..e52f3085d 100644
--- a/google/cloud/logging_v2/types/log_entry.py
+++ b/google/cloud/logging_v2/types/log_entry.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py
index 02a17fc7b..5b46a15e1 100644
--- a/google/cloud/logging_v2/types/logging.py
+++ b/google/cloud/logging_v2/types/logging.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py
index 7826bd026..6ed09222a 100644
--- a/google/cloud/logging_v2/types/logging_config.py
+++ b/google/cloud/logging_v2/types/logging_config.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py
index 74d167d5b..dd90dd3cb 100644
--- a/google/cloud/logging_v2/types/logging_metrics.py
+++ b/google/cloud/logging_v2/types/logging_metrics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/noxfile.py b/noxfile.py
index 7ebe500a3..b75e78ac3 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,22 +17,32 @@
# Generated by synthtool. DO NOT EDIT!
from __future__ import absolute_import
+
import os
import pathlib
import re
import shutil
+from typing import Dict, List
import warnings
import nox
FLAKE8_VERSION = "flake8==6.1.0"
-BLACK_VERSION = "black==22.3.0"
-ISORT_VERSION = "isort==5.10.1"
+BLACK_VERSION = "black[jupyter]==23.7.0"
+ISORT_VERSION = "isort==5.11.0"
LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
DEFAULT_PYTHON_VERSION = "3.8"
-UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+UNIT_TEST_PYTHON_VERSIONS: List[str] = [
+ "3.7",
+ "3.8",
+ "3.9",
+ "3.10",
+ "3.11",
+ "3.12",
+ "3.13",
+]
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
"asyncmock",
@@ -40,36 +50,36 @@
"pytest-cov",
"pytest-asyncio",
]
-UNIT_TEST_EXTERNAL_DEPENDENCIES = [
+UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [
"flask",
"webob",
"django",
]
-UNIT_TEST_LOCAL_DEPENDENCIES = []
-UNIT_TEST_DEPENDENCIES = []
-UNIT_TEST_EXTRAS = []
-UNIT_TEST_EXTRAS_BY_PYTHON = {}
+UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = []
+UNIT_TEST_DEPENDENCIES: List[str] = []
+UNIT_TEST_EXTRAS: List[str] = []
+UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}
-SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-SYSTEM_TEST_STANDARD_DEPENDENCIES = [
+SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"]
+SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [
"mock",
"pytest",
"google-cloud-testutils",
]
-SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [
+SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [
"google-cloud-bigquery",
"google-cloud-pubsub",
"google-cloud-storage",
"google-cloud-testutils",
+ "opentelemetry-sdk",
]
-SYSTEM_TEST_LOCAL_DEPENDENCIES = []
-SYSTEM_TEST_DEPENDENCIES = []
-SYSTEM_TEST_EXTRAS = []
-SYSTEM_TEST_EXTRAS_BY_PYTHON = {}
+SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = []
+SYSTEM_TEST_DEPENDENCIES: List[str] = []
+SYSTEM_TEST_EXTRAS: List[str] = []
+SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
nox.options.sessions = [
"unit",
"system",
@@ -78,6 +88,8 @@
"lint_setup_py",
"blacken",
"docs",
+ "docfx",
+ "format",
]
# Error if a python version is missing
@@ -165,14 +177,28 @@ def install_unittest_dependencies(session, *constraints):
session.install("-e", ".", *constraints)
-def default(session):
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
+@nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb", "cpp"],
+)
+def unit(session, protobuf_implementation):
# Install all test dependencies, then install this package in-place.
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
install_unittest_dependencies(session, "-c", constraints_path)
+ # TODO(https://github.com/googleapis/synthtool/issues/1976):
+ # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
+ # The 'cpp' implementation requires Protobuf<4.
+ if protobuf_implementation == "cpp":
+ session.install("protobuf<4")
+
# Run py.test against the unit tests.
session.run(
"py.test",
@@ -186,17 +212,13 @@ def default(session):
"--cov-fail-under=0",
os.path.join("tests", "unit"),
*session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
)
-@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
-def unit(session):
- """Run the unit test suite."""
- default(session)
-
-
def install_systemtest_dependencies(session, *constraints):
-
# Use pre-release gRPC for system tests.
# Exclude version 1.52.0rc1 which has a known issue.
# See https://github.com/grpc/grpc/issues/32163
@@ -282,13 +304,22 @@ def cover(session):
session.run("coverage", "erase")
-@nox.session(python="3.9")
+@nox.session(python="3.10")
def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
session.install(
- "sphinx==4.0.1",
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "sphinx==4.5.0",
"alabaster",
"recommonmark",
)
@@ -308,12 +339,21 @@ def docs(session):
)
-@nox.session(python="3.9")
+@nox.session(python="3.10")
def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
"gcp-sphinx-docfx-yaml",
"alabaster",
"recommonmark",
@@ -345,10 +385,17 @@ def docfx(session):
)
-@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
-def prerelease_deps(session):
+@nox.session(python="3.13")
+@nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb", "cpp"],
+)
+def prerelease_deps(session, protobuf_implementation):
"""Run all tests with prerelease versions of dependencies installed."""
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
# Install all dependencies
session.install("-e", ".[all, tests, tracing]")
unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
@@ -380,12 +427,13 @@ def prerelease_deps(session):
session.install(*constraints_deps)
prerel_deps = [
+ "google-cloud-audit-log",
"protobuf",
# dependency of grpc
"six",
+ "grpc-google-iam-v1",
"googleapis-common-protos",
- # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163
- "grpcio!=1.52.0rc1",
+ "grpcio",
"grpcio-status",
"google-api-core",
"google-auth",
@@ -411,7 +459,13 @@ def prerelease_deps(session):
session.run("python", "-c", "import grpc; print(grpc.__version__)")
session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
- session.run("py.test", "tests/unit")
+ session.run(
+ "py.test",
+ "tests/unit",
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
@@ -424,6 +478,9 @@ def prerelease_deps(session):
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_path,
*session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
)
if os.path.exists(system_test_folder_path):
session.run(
@@ -432,4 +489,7 @@ def prerelease_deps(session):
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_folder_path,
*session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
)
diff --git a/owlbot.py b/owlbot.py
index 3e932c854..2be8464c2 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import glob
import json
import os
import shutil
@@ -66,12 +67,13 @@ def place_before(path, text, *before_text, escape=None):
s.move([library], excludes=[
"**/gapic_version.py",
"setup.py",
- "testing/constraints-3.7.txt",
+ "testing/constraints*.txt",
"README.rst",
"google/cloud/logging/__init__.py", # generated types are hidden from users
"google/cloud/logging_v2/__init__.py",
"docs/index.rst",
"docs/logging_v2", # Don't include gapic library docs. Users should use the hand-written layer instead
+ "docs/multiprocessing.rst",
"scripts/fixup_logging_v2_keywords.py", # don't include script since it only works for generated layer
],
)
@@ -91,7 +93,9 @@ def place_before(path, text, *before_text, escape=None):
"google-cloud-pubsub",
"google-cloud-storage",
"google-cloud-testutils",
+ "opentelemetry-sdk"
],
+ system_test_python_versions=["3.12"],
unit_test_external_dependencies=["flask", "webob", "django"],
samples=True,
)
@@ -107,6 +111,13 @@ def place_before(path, text, *before_text, escape=None):
"README.rst", # This repo has a customized README
],
)
+s.replace("noxfile.py",
+"""prerel_deps = \[
+ "protobuf",""",
+"""prerel_deps = [
+ "google-cloud-audit-log",
+ "protobuf",""",
+)
# adjust .trampolinerc for environment tests
s.replace(".trampolinerc", "required_envvars[^\)]*\)", "required_envvars+=()")
@@ -116,13 +127,6 @@ def place_before(path, text, *before_text, escape=None):
'pass_down_envvars+=(\n "ENVIRONMENT"\n "RUNTIME"',
)
-# don't lint environment tests
-s.replace(
- ".flake8",
- "exclude =",
- "exclude =\n # Exclude environment test code.\n tests/environment/**\n",
-)
-
# use conventional commits for renovate bot
s.replace(
"renovate.json",
@@ -139,6 +143,110 @@ def place_before(path, text, *before_text, escape=None):
python.py_samples()
+# For autogenerated sample code, resolve object paths by finding the specific subpackage
+# the object belongs to. This is because we leave out all autogenerated packages from the
+# __init__.py of logging_v2. For now, this is manually copy-pasted from the __all__s of each
+# subpackage's __init__.py.
+gapic_objects = {
+ "logging_v2.services.config_service_v2": [
+ "ConfigServiceV2Client",
+ "ConfigServiceV2AsyncClient"
+ ],
+ "logging_v2.services.logging_service_v2": [
+ "LoggingServiceV2Client",
+ "LoggingServiceV2AsyncClient"
+ ],
+ "logging_v2.services.metrics_service_v2": [
+ "MetricsServiceV2Client",
+ "MetricsServiceV2AsyncClient"
+ ],
+ "logging_v2.types": [
+ "LogEntry",
+ "LogEntryOperation",
+ "LogEntrySourceLocation",
+ "LogSplit",
+ "DeleteLogRequest",
+ "ListLogEntriesRequest",
+ "ListLogEntriesResponse",
+ "ListLogsRequest",
+ "ListLogsResponse",
+ "ListMonitoredResourceDescriptorsRequest",
+ "ListMonitoredResourceDescriptorsResponse",
+ "TailLogEntriesRequest",
+ "TailLogEntriesResponse",
+ "WriteLogEntriesPartialErrors",
+ "WriteLogEntriesRequest",
+ "WriteLogEntriesResponse",
+ "BigQueryDataset",
+ "BigQueryOptions",
+ "BucketMetadata",
+ "CmekSettings",
+ "CopyLogEntriesMetadata",
+ "CopyLogEntriesRequest",
+ "CopyLogEntriesResponse",
+ "CreateBucketRequest",
+ "CreateExclusionRequest",
+ "CreateLinkRequest",
+ "CreateSinkRequest",
+ "CreateViewRequest",
+ "DeleteBucketRequest",
+ "DeleteExclusionRequest",
+ "DeleteLinkRequest",
+ "DeleteSinkRequest",
+ "DeleteViewRequest",
+ "GetBucketRequest",
+ "GetCmekSettingsRequest",
+ "GetExclusionRequest",
+ "GetLinkRequest",
+ "GetSettingsRequest",
+ "GetSinkRequest",
+ "GetViewRequest",
+ "IndexConfig",
+ "Link",
+ "LinkMetadata",
+ "ListBucketsRequest",
+ "ListBucketsResponse",
+ "ListExclusionsRequest",
+ "ListExclusionsResponse",
+ "ListLinksRequest",
+ "ListLinksResponse",
+ "ListSinksRequest",
+ "ListSinksResponse",
+ "ListViewsRequest",
+ "ListViewsResponse",
+ "LocationMetadata",
+ "LogBucket",
+ "LogExclusion",
+ "LogSink",
+ "LogView",
+ "Settings",
+ "UndeleteBucketRequest",
+ "UpdateBucketRequest",
+ "UpdateCmekSettingsRequest",
+ "UpdateExclusionRequest",
+ "UpdateSettingsRequest",
+ "UpdateSinkRequest",
+ "UpdateViewRequest",
+ "IndexType",
+ "LifecycleState",
+ "OperationState",
+ "CreateLogMetricRequest",
+ "DeleteLogMetricRequest",
+ "GetLogMetricRequest",
+ "ListLogMetricsRequest",
+ "ListLogMetricsResponse",
+ "LogMetric",
+ "UpdateLogMetricRequest"
+ ]
+}
+
+sample_files = glob.glob("samples/generated_samples/logging_v2_*.py")
+for subpackage_name in gapic_objects:
+ for object_name in gapic_objects[subpackage_name]:
+ text = "logging_v2." + object_name
+ replacement = subpackage_name + "." + object_name
+ s.replace(sample_files, text, replacement)
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
s.shell.run(["nox", "-s", "blacken"], cwd="samples/snippets", hide_output=False)
@@ -147,7 +255,7 @@ def place_before(path, text, *before_text, escape=None):
# --------------------------------------------------------------------------
# add shared environment variables to test configs
-tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"]
+tracked_subdirs = ["continuous", "presubmit", "samples"]
for subdir in tracked_subdirs:
for path, subdirs, files in os.walk(f".kokoro/{subdir}"):
for name in files:
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 000000000..2d8ce14b8
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,29 @@
+[pytest]
+filterwarnings =
+ # treat all warnings as errors
+ error
+ # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed
+ ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning
+ # Remove once Release PR https://github.com/googleapis/python-api-common-protos/pull/191 is merged
+ ignore:.*pkg_resources.declare_namespace:DeprecationWarning
+ ignore:.*pkg_resources is deprecated as an API:DeprecationWarning
+ # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed
+ ignore:unclosed:ResourceWarning
+ # Remove after support for Python 3.7 is dropped
+ ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning
+ # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed
+ ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning
+ # DeprecationWarnings triggered by Flask 1.0 testing by Flask dependencies in test code
+ # 3.7 deprecation warnings
+ ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3,and in 3.9 it will stop working:DeprecationWarning
+ # 3.8 - 3.9 deprecation warnings
+ ignore:Importing 'itsdangerous.json' is deprecated and will be removed in ItsDangerous 2.1. Use Python's 'json' module instead.:DeprecationWarning
+ ignore:Using or importing the ABCs from 'collections' instead of from 'collections.abc' is deprecated since Python 3.3, and in 3.10 it will stop working:DeprecationWarning
+ # 3.12 deprecation warnings
+ ignore:Attribute s is deprecated and will be removed in Python 3.14; use value instead:DeprecationWarning
+ ignore:ast.Str is deprecated and will be removed in Python 3.14; use ast.Constant instead:DeprecationWarning
+ ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning
+ # Remove warning once https://github.com/protocolbuffers/protobuf/issues/17345 is fixed
+ ignore:.*Please use message_factory.GetMessageClass\(\) instead. SymbolDatabase.GetPrototype\(\) will be removed soon.:UserWarning
+ # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/2046 is fixed
+ ignore:coroutine 'AsyncMockMixin._execute_mock_call' was never awaited:RuntimeWarning
diff --git a/renovate.json b/renovate.json
index dde963098..ff5e5c4c6 100644
--- a/renovate.json
+++ b/renovate.json
@@ -5,7 +5,7 @@
":preserveSemverRanges",
":disableDependencyDashboard"
],
- "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
"pip_requirements": {
"fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
},
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py
index 806e937dd..f4a59244d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_copy_log_entries():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.CopyLogEntriesRequest(
+ request = logging_v2.types.CopyLogEntriesRequest(
name="name_value",
destination="destination_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py
index ca0209f00..896bfe77d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_copy_log_entries():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.CopyLogEntriesRequest(
+ request = logging_v2.types.CopyLogEntriesRequest(
name="name_value",
destination="destination_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py
index c1f028fb0..25292de97 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_create_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.CreateBucketRequest(
+ request = logging_v2.types.CreateBucketRequest(
parent="parent_value",
bucket_id="bucket_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py
index 8fe42df3c..95c692aac 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_create_bucket_async():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.CreateBucketRequest(
+ request = logging_v2.types.CreateBucketRequest(
parent="parent_value",
bucket_id="bucket_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py
index 1ce698784..d1da5bbe7 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_create_bucket_async():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.CreateBucketRequest(
+ request = logging_v2.types.CreateBucketRequest(
parent="parent_value",
bucket_id="bucket_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py
index dc73253f4..395188ef8 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_create_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.CreateBucketRequest(
+ request = logging_v2.types.CreateBucketRequest(
parent="parent_value",
bucket_id="bucket_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py
index 17490c61e..10cb193be 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
async def sample_create_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- exclusion = logging_v2.LogExclusion()
+ exclusion = logging_v2.types.LogExclusion()
exclusion.name = "name_value"
exclusion.filter = "filter_value"
- request = logging_v2.CreateExclusionRequest(
+ request = logging_v2.types.CreateExclusionRequest(
parent="parent_value",
exclusion=exclusion,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py
index 75ec32f48..a52541ea4 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
def sample_create_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- exclusion = logging_v2.LogExclusion()
+ exclusion = logging_v2.types.LogExclusion()
exclusion.name = "name_value"
exclusion.filter = "filter_value"
- request = logging_v2.CreateExclusionRequest(
+ request = logging_v2.types.CreateExclusionRequest(
parent="parent_value",
exclusion=exclusion,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py
index 8ceb52985..8e4558480 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_create_link():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.CreateLinkRequest(
+ request = logging_v2.types.CreateLinkRequest(
parent="parent_value",
link_id="link_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py
index 604ff6626..e1f948394 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_create_link():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.CreateLinkRequest(
+ request = logging_v2.types.CreateLinkRequest(
parent="parent_value",
link_id="link_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py
index 277e83055..e83d648a1 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
async def sample_create_sink():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- sink = logging_v2.LogSink()
+ sink = logging_v2.types.LogSink()
sink.name = "name_value"
sink.destination = "destination_value"
- request = logging_v2.CreateSinkRequest(
+ request = logging_v2.types.CreateSinkRequest(
parent="parent_value",
sink=sink,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py
index a4df02994..aa694125d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
def sample_create_sink():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- sink = logging_v2.LogSink()
+ sink = logging_v2.types.LogSink()
sink.name = "name_value"
sink.destination = "destination_value"
- request = logging_v2.CreateSinkRequest(
+ request = logging_v2.types.CreateSinkRequest(
parent="parent_value",
sink=sink,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py
index 5cd201276..f40f1f6d5 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_create_view():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.CreateViewRequest(
+ request = logging_v2.types.CreateViewRequest(
parent="parent_value",
view_id="view_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py
index cd3ca94e1..d4f174cf5 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_create_view():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.CreateViewRequest(
+ request = logging_v2.types.CreateViewRequest(
parent="parent_value",
view_id="view_id_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py
index fcffb6db8..d56884bf3 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteBucketRequest(
+ request = logging_v2.types.DeleteBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py
index a8f902116..0b4b38bf4 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteBucketRequest(
+ request = logging_v2.types.DeleteBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py
index b426d4703..1004cf6e8 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteExclusionRequest(
+ request = logging_v2.types.DeleteExclusionRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py
index 5d98f782b..e3c3f0be0 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteExclusionRequest(
+ request = logging_v2.types.DeleteExclusionRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py
index 8c7a934a7..30407d37e 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_link():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteLinkRequest(
+ request = logging_v2.types.DeleteLinkRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py
index dfa59b307..8ddbbcaf3 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_link():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteLinkRequest(
+ request = logging_v2.types.DeleteLinkRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py
index 11d91947e..17ed0b302 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_sink():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteSinkRequest(
+ request = logging_v2.types.DeleteSinkRequest(
sink_name="sink_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py
index bf9875b0a..bc82c3aa0 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_sink():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteSinkRequest(
+ request = logging_v2.types.DeleteSinkRequest(
sink_name="sink_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py
index fe9c7031e..ca77b619c 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_view():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteViewRequest(
+ request = logging_v2.types.DeleteViewRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py
index b5539c04f..f6cd1237a 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_view():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteViewRequest(
+ request = logging_v2.types.DeleteViewRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py
index c29c35a4e..fd2b2a966 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetBucketRequest(
+ request = logging_v2.types.GetBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py
index 818ab646f..728615d94 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetBucketRequest(
+ request = logging_v2.types.GetBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py
index 3ae13401d..eb661ea33 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_cmek_settings():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetCmekSettingsRequest(
+ request = logging_v2.types.GetCmekSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py
index 499d68bdb..cc0b8e49a 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_cmek_settings():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetCmekSettingsRequest(
+ request = logging_v2.types.GetCmekSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py
index 03b7dc7d8..c3a10df02 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetExclusionRequest(
+ request = logging_v2.types.GetExclusionRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py
index a445ed396..0009b048b 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetExclusionRequest(
+ request = logging_v2.types.GetExclusionRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py
index ddc3d131f..2f1e4647f 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_link():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetLinkRequest(
+ request = logging_v2.types.GetLinkRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py
index 3a7643b3a..8c8d9ddc1 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_link():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetLinkRequest(
+ request = logging_v2.types.GetLinkRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py
index 4ee968e81..41abc13ac 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_settings():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetSettingsRequest(
+ request = logging_v2.types.GetSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py
index a3e018440..93bb910a5 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_settings():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetSettingsRequest(
+ request = logging_v2.types.GetSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py
index b6fe5b11e..bdc293b5b 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_sink():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetSinkRequest(
+ request = logging_v2.types.GetSinkRequest(
sink_name="sink_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py
index ecebaf119..a4b12474d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_sink():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetSinkRequest(
+ request = logging_v2.types.GetSinkRequest(
sink_name="sink_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py
index 5992e53ee..4474f6c28 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_view():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetViewRequest(
+ request = logging_v2.types.GetViewRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py
index 14d8679bc..ce5680889 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_view():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetViewRequest(
+ request = logging_v2.types.GetViewRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py
index dfbc3b411..d4aebfa95 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_buckets():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListBucketsRequest(
+ request = logging_v2.types.ListBucketsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py
index 8626f7cae..7fd1e53de 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_buckets():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListBucketsRequest(
+ request = logging_v2.types.ListBucketsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py
index ad42edff5..199b152b9 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_exclusions():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListExclusionsRequest(
+ request = logging_v2.types.ListExclusionsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py
index 727723a7a..7eafeb00e 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_exclusions():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListExclusionsRequest(
+ request = logging_v2.types.ListExclusionsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py
index 7eccffaa6..a8eee844c 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_links():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListLinksRequest(
+ request = logging_v2.types.ListLinksRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py
index a2f98d69d..d6fde0bb7 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_links():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListLinksRequest(
+ request = logging_v2.types.ListLinksRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py
index b642d38ee..33e10deb2 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_sinks():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListSinksRequest(
+ request = logging_v2.types.ListSinksRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py
index b4fc92452..162119bc1 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_sinks():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListSinksRequest(
+ request = logging_v2.types.ListSinksRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py
index 1542a5a38..43e835259 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_views():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListViewsRequest(
+ request = logging_v2.types.ListViewsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
index b273c465d..dbddce32c 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_views():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListViewsRequest(
+ request = logging_v2.types.ListViewsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py
index d2695708d..7650a542f 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_undelete_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.UndeleteBucketRequest(
+ request = logging_v2.types.UndeleteBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py
index 8d25c7d33..c320d118e 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_undelete_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.UndeleteBucketRequest(
+ request = logging_v2.types.UndeleteBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py
index e1c741b67..4566ea26a 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_update_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.UpdateBucketRequest(
+ request = logging_v2.types.UpdateBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py
index 7dde59dcd..f127c9904 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_update_bucket_async():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.UpdateBucketRequest(
+ request = logging_v2.types.UpdateBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py
index 2ecaf8df2..550aed7fb 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_update_bucket_async():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.UpdateBucketRequest(
+ request = logging_v2.types.UpdateBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py
index 7b4a3c597..bfa58357d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_update_bucket():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.UpdateBucketRequest(
+ request = logging_v2.types.UpdateBucketRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py
index 96fc8ff97..22e799e9d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_update_cmek_settings():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.UpdateCmekSettingsRequest(
+ request = logging_v2.types.UpdateCmekSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py
index 9bbc7dcb1..766376e0e 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_update_cmek_settings():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.UpdateCmekSettingsRequest(
+ request = logging_v2.types.UpdateCmekSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py
index d8b8d7f7b..a19fbea8c 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
async def sample_update_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- exclusion = logging_v2.LogExclusion()
+ exclusion = logging_v2.types.LogExclusion()
exclusion.name = "name_value"
exclusion.filter = "filter_value"
- request = logging_v2.UpdateExclusionRequest(
+ request = logging_v2.types.UpdateExclusionRequest(
name="name_value",
exclusion=exclusion,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py
index 36d5776e3..3ba3a8f24 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
def sample_update_exclusion():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- exclusion = logging_v2.LogExclusion()
+ exclusion = logging_v2.types.LogExclusion()
exclusion.name = "name_value"
exclusion.filter = "filter_value"
- request = logging_v2.UpdateExclusionRequest(
+ request = logging_v2.types.UpdateExclusionRequest(
name="name_value",
exclusion=exclusion,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py
index b51dd81cc..84a3dd245 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_update_settings():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.UpdateSettingsRequest(
+ request = logging_v2.types.UpdateSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py
index 1e7aefce8..3754cd2e7 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_update_settings():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.UpdateSettingsRequest(
+ request = logging_v2.types.UpdateSettingsRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py
index aef847379..f2c54208b 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
async def sample_update_sink():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- sink = logging_v2.LogSink()
+ sink = logging_v2.types.LogSink()
sink.name = "name_value"
sink.destination = "destination_value"
- request = logging_v2.UpdateSinkRequest(
+ request = logging_v2.types.UpdateSinkRequest(
sink_name="sink_name_value",
sink=sink,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py
index e84230202..6238f9a10 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
def sample_update_sink():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- sink = logging_v2.LogSink()
+ sink = logging_v2.types.LogSink()
sink.name = "name_value"
sink.destination = "destination_value"
- request = logging_v2.UpdateSinkRequest(
+ request = logging_v2.types.UpdateSinkRequest(
sink_name="sink_name_value",
sink=sink,
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py
index f143a772c..9ba5fc19f 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_update_view():
# Create a client
- client = logging_v2.ConfigServiceV2AsyncClient()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.UpdateViewRequest(
+ request = logging_v2.types.UpdateViewRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py
index 3867d3702..97a2ebe4d 100644
--- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_update_view():
# Create a client
- client = logging_v2.ConfigServiceV2Client()
+ client = logging_v2.services.config_service_v2.ConfigServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.UpdateViewRequest(
+ request = logging_v2.types.UpdateViewRequest(
name="name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py
index c6469053b..89495ff0b 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_log():
# Create a client
- client = logging_v2.LoggingServiceV2AsyncClient()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteLogRequest(
+ request = logging_v2.types.DeleteLogRequest(
log_name="log_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py
index 1e4e28abc..24e4cd924 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_log():
# Create a client
- client = logging_v2.LoggingServiceV2Client()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteLogRequest(
+ request = logging_v2.types.DeleteLogRequest(
log_name="log_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py
index d5cfe190c..e5226e985 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_log_entries():
# Create a client
- client = logging_v2.LoggingServiceV2AsyncClient()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListLogEntriesRequest(
+ request = logging_v2.types.ListLogEntriesRequest(
resource_names=['resource_names_value1', 'resource_names_value2'],
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
index d24923cb1..3fb660f90 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_log_entries():
# Create a client
- client = logging_v2.LoggingServiceV2Client()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListLogEntriesRequest(
+ request = logging_v2.types.ListLogEntriesRequest(
resource_names=['resource_names_value1', 'resource_names_value2'],
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
index 71859024d..4e3153fd6 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_logs():
# Create a client
- client = logging_v2.LoggingServiceV2AsyncClient()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListLogsRequest(
+ request = logging_v2.types.ListLogsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
index 5a5ff140c..fa44c8cc7 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_logs():
# Create a client
- client = logging_v2.LoggingServiceV2Client()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListLogsRequest(
+ request = logging_v2.types.ListLogsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py
index 519a2498a..054520607 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_monitored_resource_descriptors():
# Create a client
- client = logging_v2.LoggingServiceV2AsyncClient()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListMonitoredResourceDescriptorsRequest(
+ request = logging_v2.types.ListMonitoredResourceDescriptorsRequest(
)
# Make the request
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
index ca97be4b3..cd404c1e1 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_monitored_resource_descriptors():
# Create a client
- client = logging_v2.LoggingServiceV2Client()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListMonitoredResourceDescriptorsRequest(
+ request = logging_v2.types.ListMonitoredResourceDescriptorsRequest(
)
# Make the request
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py
index 24e9e2009..81eb975b9 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,15 +36,15 @@
async def sample_tail_log_entries():
# Create a client
- client = logging_v2.LoggingServiceV2AsyncClient()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.TailLogEntriesRequest(
+ request = logging_v2.types.TailLogEntriesRequest(
resource_names=['resource_names_value1', 'resource_names_value2'],
)
# This method expects an iterator which contains
- # 'logging_v2.TailLogEntriesRequest' objects
+ # 'logging_v2.types.TailLogEntriesRequest' objects
# Here we create a generator that yields a single `request` for
# demonstrative purposes.
requests = [request]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py
index dc9a545e7..5d91ceb7e 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,15 +36,15 @@
def sample_tail_log_entries():
# Create a client
- client = logging_v2.LoggingServiceV2Client()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.TailLogEntriesRequest(
+ request = logging_v2.types.TailLogEntriesRequest(
resource_names=['resource_names_value1', 'resource_names_value2'],
)
# This method expects an iterator which contains
- # 'logging_v2.TailLogEntriesRequest' objects
+ # 'logging_v2.types.TailLogEntriesRequest' objects
# Here we create a generator that yields a single `request` for
# demonstrative purposes.
requests = [request]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py
index 1a0d48664..300dd7819 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,13 +36,13 @@
async def sample_write_log_entries():
# Create a client
- client = logging_v2.LoggingServiceV2AsyncClient()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient()
# Initialize request argument(s)
- entries = logging_v2.LogEntry()
+ entries = logging_v2.types.LogEntry()
entries.log_name = "log_name_value"
- request = logging_v2.WriteLogEntriesRequest(
+ request = logging_v2.types.WriteLogEntriesRequest(
entries=entries,
)
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py
index de4bfe6c8..7b16177ce 100644
--- a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,13 +36,13 @@
def sample_write_log_entries():
# Create a client
- client = logging_v2.LoggingServiceV2Client()
+ client = logging_v2.services.logging_service_v2.LoggingServiceV2Client()
# Initialize request argument(s)
- entries = logging_v2.LogEntry()
+ entries = logging_v2.types.LogEntry()
entries.log_name = "log_name_value"
- request = logging_v2.WriteLogEntriesRequest(
+ request = logging_v2.types.WriteLogEntriesRequest(
entries=entries,
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py
index 557d32293..8ea8a849b 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
async def sample_create_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2AsyncClient()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient()
# Initialize request argument(s)
- metric = logging_v2.LogMetric()
+ metric = logging_v2.types.LogMetric()
metric.name = "name_value"
metric.filter = "filter_value"
- request = logging_v2.CreateLogMetricRequest(
+ request = logging_v2.types.CreateLogMetricRequest(
parent="parent_value",
metric=metric,
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py
index b9e9cade9..f67758eae 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
def sample_create_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2Client()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client()
# Initialize request argument(s)
- metric = logging_v2.LogMetric()
+ metric = logging_v2.types.LogMetric()
metric.name = "name_value"
metric.filter = "filter_value"
- request = logging_v2.CreateLogMetricRequest(
+ request = logging_v2.types.CreateLogMetricRequest(
parent="parent_value",
metric=metric,
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py
index fea40e7a4..68a5e6c8d 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_delete_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2AsyncClient()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.DeleteLogMetricRequest(
+ request = logging_v2.types.DeleteLogMetricRequest(
metric_name="metric_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py
index ccf2983be..462f8bc32 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_delete_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2Client()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.DeleteLogMetricRequest(
+ request = logging_v2.types.DeleteLogMetricRequest(
metric_name="metric_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py
index 4b6984171..9eeea97fc 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_get_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2AsyncClient()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.GetLogMetricRequest(
+ request = logging_v2.types.GetLogMetricRequest(
metric_name="metric_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py
index abb071c65..cf60c346d 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_get_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2Client()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.GetLogMetricRequest(
+ request = logging_v2.types.GetLogMetricRequest(
metric_name="metric_name_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py
index f280ec9de..f7bc654c8 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
async def sample_list_log_metrics():
# Create a client
- client = logging_v2.MetricsServiceV2AsyncClient()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient()
# Initialize request argument(s)
- request = logging_v2.ListLogMetricsRequest(
+ request = logging_v2.types.ListLogMetricsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py
index bffbe10a8..4ae9a795e 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,10 +36,10 @@
def sample_list_log_metrics():
# Create a client
- client = logging_v2.MetricsServiceV2Client()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client()
# Initialize request argument(s)
- request = logging_v2.ListLogMetricsRequest(
+ request = logging_v2.types.ListLogMetricsRequest(
parent="parent_value",
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py
index 59bfeeaaa..2c3669bca 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
async def sample_update_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2AsyncClient()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient()
# Initialize request argument(s)
- metric = logging_v2.LogMetric()
+ metric = logging_v2.types.LogMetric()
metric.name = "name_value"
metric.filter = "filter_value"
- request = logging_v2.UpdateLogMetricRequest(
+ request = logging_v2.types.UpdateLogMetricRequest(
metric_name="metric_name_value",
metric=metric,
)
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py
index ed4dd0126..2fed9c39e 100644
--- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -36,14 +36,14 @@
def sample_update_log_metric():
# Create a client
- client = logging_v2.MetricsServiceV2Client()
+ client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client()
# Initialize request argument(s)
- metric = logging_v2.LogMetric()
+ metric = logging_v2.types.LogMetric()
metric.name = "name_value"
metric.filter = "filter_value"
- request = logging_v2.UpdateLogMetricRequest(
+ request = logging_v2.types.UpdateLogMetricRequest(
metric_name="metric_name_value",
metric=metric,
)
diff --git a/samples/generated_samples/snippet_metadata_google.logging.v2.json b/samples/generated_samples/snippet_metadata_google.logging.v2.json
index b62675ba6..7d77545ab 100644
--- a/samples/generated_samples/snippet_metadata_google.logging.v2.json
+++ b/samples/generated_samples/snippet_metadata_google.logging.v2.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-logging",
- "version": "0.1.0"
+ "version": "3.12.1"
},
"snippets": [
{
@@ -43,7 +43,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation_async.AsyncOperation",
@@ -119,7 +119,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation.Operation",
@@ -196,7 +196,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation_async.AsyncOperation",
@@ -272,7 +272,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation.Operation",
@@ -349,7 +349,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogBucket",
@@ -425,7 +425,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogBucket",
@@ -510,7 +510,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogExclusion",
@@ -594,7 +594,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogExclusion",
@@ -683,7 +683,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation_async.AsyncOperation",
@@ -771,7 +771,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation.Operation",
@@ -856,7 +856,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogSink",
@@ -940,7 +940,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogSink",
@@ -1017,7 +1017,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogView",
@@ -1093,7 +1093,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogView",
@@ -1170,7 +1170,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_bucket"
@@ -1243,7 +1243,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_bucket"
@@ -1321,7 +1321,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_exclusion"
@@ -1398,7 +1398,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_exclusion"
@@ -1476,7 +1476,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation_async.AsyncOperation",
@@ -1556,7 +1556,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation.Operation",
@@ -1637,7 +1637,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_sink"
@@ -1714,7 +1714,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_sink"
@@ -1788,7 +1788,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_view"
@@ -1861,7 +1861,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_view"
@@ -1935,7 +1935,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogBucket",
@@ -2011,7 +2011,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogBucket",
@@ -2088,7 +2088,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.CmekSettings",
@@ -2164,7 +2164,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.CmekSettings",
@@ -2245,7 +2245,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogExclusion",
@@ -2325,7 +2325,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogExclusion",
@@ -2406,7 +2406,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.Link",
@@ -2486,7 +2486,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.Link",
@@ -2567,7 +2567,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.Settings",
@@ -2647,7 +2647,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.Settings",
@@ -2728,7 +2728,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogSink",
@@ -2808,7 +2808,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogSink",
@@ -2885,7 +2885,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogView",
@@ -2961,7 +2961,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogView",
@@ -3042,7 +3042,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager",
@@ -3122,7 +3122,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager",
@@ -3203,7 +3203,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager",
@@ -3283,7 +3283,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager",
@@ -3364,7 +3364,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager",
@@ -3444,7 +3444,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager",
@@ -3525,7 +3525,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager",
@@ -3605,7 +3605,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager",
@@ -3686,7 +3686,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager",
@@ -3766,7 +3766,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager",
@@ -3843,7 +3843,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "undelete_bucket"
@@ -3916,7 +3916,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "undelete_bucket"
@@ -3990,7 +3990,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation_async.AsyncOperation",
@@ -4066,7 +4066,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.api_core.operation.Operation",
@@ -4143,7 +4143,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogBucket",
@@ -4219,7 +4219,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogBucket",
@@ -4296,7 +4296,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.CmekSettings",
@@ -4372,7 +4372,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.CmekSettings",
@@ -4461,7 +4461,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogExclusion",
@@ -4549,7 +4549,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogExclusion",
@@ -4634,7 +4634,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.Settings",
@@ -4718,7 +4718,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.Settings",
@@ -4807,7 +4807,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogSink",
@@ -4895,7 +4895,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogSink",
@@ -4972,7 +4972,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogView",
@@ -5048,7 +5048,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogView",
@@ -5129,7 +5129,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_log"
@@ -5206,7 +5206,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_log"
@@ -5292,7 +5292,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager",
@@ -5380,7 +5380,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager",
@@ -5461,7 +5461,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager",
@@ -5541,7 +5541,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager",
@@ -5618,7 +5618,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager",
@@ -5694,7 +5694,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager",
@@ -5771,7 +5771,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]",
@@ -5847,7 +5847,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]",
@@ -5940,7 +5940,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse",
@@ -6032,7 +6032,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse",
@@ -6117,7 +6117,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogMetric",
@@ -6201,7 +6201,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogMetric",
@@ -6282,7 +6282,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_log_metric"
@@ -6359,7 +6359,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"shortName": "delete_log_metric"
@@ -6437,7 +6437,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogMetric",
@@ -6517,7 +6517,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogMetric",
@@ -6598,7 +6598,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager",
@@ -6678,7 +6678,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager",
@@ -6763,7 +6763,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogMetric",
@@ -6847,7 +6847,7 @@
},
{
"name": "metadata",
- "type": "Sequence[Tuple[str, str]"
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]"
}
],
"resultType": "google.cloud.logging_v2.types.LogMetric",
diff --git a/samples/snippets/export_test.py b/samples/snippets/export_test.py
index b1ecf4923..845359e09 100644
--- a/samples/snippets/export_test.py
+++ b/samples/snippets/export_test.py
@@ -13,19 +13,30 @@
# limitations under the License.
import os
+import re
import random
import string
+import time
import backoff
-from google.cloud import logging
+from google.cloud import logging, storage
import pytest
import export
BUCKET = os.environ["CLOUD_STORAGE_BUCKET"]
-TEST_SINK_NAME_TMPL = "example_sink_{}"
+TEST_SINK_NAME_TMPL = "example_sink_{}_{}"
TEST_SINK_FILTER = "severity>=CRITICAL"
+TIMESTAMP = int(time.time())
+
+# Threshold beyond which the cleanup_old_sinks fixture will delete
+# old sink, in seconds
+CLEANUP_THRESHOLD = 7200 # 2 hours
+
+# Max buckets to delete at a time, to mitigate operation timeout
+# issues. To turn off in the future, set to None.
+MAX_BUCKETS = 1500
def _random_id():
@@ -34,12 +45,49 @@ def _random_id():
)
-@pytest.yield_fixture
-def example_sink():
+def _create_sink_name():
+ return TEST_SINK_NAME_TMPL.format(TIMESTAMP, _random_id())
+
+
+@backoff.on_exception(backoff.expo, Exception, max_time=60, raise_on_giveup=False)
+def _delete_object(obj):
+ obj.delete()
+
+
+# Runs once for entire test suite
+@pytest.fixture(scope="module")
+def cleanup_old_sinks():
+ client = logging.Client()
+ test_sink_name_regex = (
+ r"^" + TEST_SINK_NAME_TMPL.format(r"(\d+)", r"[A-Z0-9]{6}") + r"$"
+ )
+ for sink in client.list_sinks():
+ match = re.match(test_sink_name_regex, sink.name)
+ if match:
+ sink_timestamp = int(match.group(1))
+ if TIMESTAMP - sink_timestamp > CLEANUP_THRESHOLD:
+ _delete_object(sink)
+
+ storage_client = storage.Client()
+
+ # See _sink_storage_setup in usage_guide.py for details about how
+ # sinks are named.
+ test_bucket_name_regex = r"^sink\-storage\-(\d+)$"
+ for bucket in storage_client.list_buckets(max_results=MAX_BUCKETS):
+ match = re.match(test_bucket_name_regex, bucket.name)
+ if match:
+ # Bucket timestamp is int(time.time() * 1000)
+ bucket_timestamp = int(match.group(1))
+ if TIMESTAMP - bucket_timestamp // 1000 > CLEANUP_THRESHOLD:
+ _delete_object(bucket)
+
+
+@pytest.fixture
+def example_sink(cleanup_old_sinks):
client = logging.Client()
sink = client.sink(
- TEST_SINK_NAME_TMPL.format(_random_id()),
+ _create_sink_name(),
filter_=TEST_SINK_FILTER,
destination="storage.googleapis.com/{bucket}".format(bucket=BUCKET),
)
@@ -48,10 +96,7 @@ def example_sink():
yield sink
- try:
- sink.delete()
- except Exception:
- pass
+ _delete_object(sink)
def test_list(example_sink, capsys):
@@ -65,16 +110,13 @@ def eventually_consistent_test():
def test_create(capsys):
- sink_name = TEST_SINK_NAME_TMPL.format(_random_id())
+ sink_name = _create_sink_name()
try:
export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER)
# Clean-up the temporary sink.
finally:
- try:
- logging.Client().sink(sink_name).delete()
- except Exception:
- pass
+ _delete_object(logging.Client().sink(sink_name))
out, _ = capsys.readouterr()
assert sink_name in out
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index 1224cbe21..c9a3d1ecb 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index 1779d47d2..37eb1f9aa 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -1,2 +1,3 @@
backoff==2.2.1
-pytest==7.4.2
+pytest===7.4.4; python_version == '3.7'
+pytest==8.2.2; python_version >= '3.8'
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 4c4ec825f..8a52ee5c6 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,4 +1,4 @@
-google-cloud-logging==3.6.0
-google-cloud-bigquery==3.11.4
-google-cloud-storage==2.10.0
-google-cloud-pubsub==2.18.4
+google-cloud-logging==3.10.0
+google-cloud-bigquery==3.25.0
+google-cloud-storage==2.17.0
+google-cloud-pubsub==2.22.0
diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py
index 5c9e86990..6dee33798 100644
--- a/samples/snippets/usage_guide.py
+++ b/samples/snippets/usage_guide.py
@@ -475,13 +475,47 @@ def using_extras(client):
def setup_logging(client):
import logging
- # [START setup_logging]
+ # [START logging_setup_logging]
client.setup_logging(log_level=logging.INFO)
- # [END setup_logging]
+ # [END logging_setup_logging]
- # [START setup_logging_excludes]
+ # [START logging_setup_logging_excludes]
client.setup_logging(log_level=logging.INFO, excluded_loggers=("werkzeug",))
- # [END setup_logging_excludes]
+ # [END logging_setup_logging_excludes]
+
+
+@snippet
+def logging_dict_config(client):
+ # [START logging_dict_config]
+ import logging.config
+
+ import google.cloud.logging
+
+ client = google.cloud.logging.Client()
+
+ LOGGING = {
+ "version": 1,
+ "handlers": {
+ "cloud_logging_handler": {
+ "class": "google.cloud.logging.handlers.CloudLoggingHandler",
+ "client": client,
+ },
+ "structured_log_handler": {
+ "class": "google.cloud.logging.handlers.StructuredLogHandler"
+ },
+ },
+ "root": {"handlers": [], "level": "WARNING"},
+ "loggers": {
+ "cloud_logger": {"handlers": ["cloud_logging_handler"], "level": "INFO"},
+ "structured_logger": {
+ "handlers": ["structured_log_handler"],
+ "level": "INFO",
+ },
+ },
+ }
+
+ logging.config.dictConfig(LOGGING)
+ # [END logging_dict_config]
def _line_no(func):
diff --git a/samples/snippets/usage_guide_test.py b/samples/snippets/usage_guide_test.py
index f02d82fbd..3f606dd65 100644
--- a/samples/snippets/usage_guide_test.py
+++ b/samples/snippets/usage_guide_test.py
@@ -88,3 +88,9 @@ def test_client_list_entries():
for item in to_delete:
usage_guide._backoff_not_found(item.delete)
+
+
+def test_dict_config():
+ client = Client()
+
+ usage_guide.logging_dict_config(client)
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index 0018b421d..120b0ddc4 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# Copyright 2023 Google LLC All rights reserved.
+# Copyright 2024 Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
index 1acc11983..8f5e248a0 100644
--- a/scripts/readme-gen/readme_gen.py
+++ b/scripts/readme-gen/readme_gen.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 052350089..000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,19 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Generated by synthtool. DO NOT EDIT!
-[bdist_wheel]
-universal = 1
diff --git a/setup.py b/setup.py
index f43fd0bf9..c80db0467 100644
--- a/setup.py
+++ b/setup.py
@@ -36,14 +36,19 @@
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*",
- "google-cloud-appengine-logging>=0.1.0, <2.0.0dev",
- "google-cloud-audit-log >= 0.1.0, < 1.0.0dev",
- "google-cloud-core >= 2.0.0, <3.0.0dev",
- "grpc-google-iam-v1 >=0.12.4, <1.0.0dev",
- "proto-plus >= 1.22.0, <2.0.0dev",
- "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
- "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+ "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+ # Exclude incompatible versions of `google-auth`
+ # See https://github.com/googleapis/google-cloud-python/issues/12364
+ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0",
+ "google-cloud-appengine-logging>=0.1.3, <2.0.0",
+ "google-cloud-audit-log >= 0.3.1, < 1.0.0",
+ "google-cloud-core >= 2.0.0, <3.0.0",
+ "grpc-google-iam-v1 >=0.12.4, <1.0.0",
+ "opentelemetry-api >= 1.9.0",
+ "proto-plus >= 1.22.0, <2.0.0",
+ "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'",
+ "proto-plus >= 1.25.0, <2.0.0; python_version>='3.13'",
+ "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
]
url = "https://github.com/googleapis/python-logging"
@@ -55,14 +60,10 @@
packages = [
package
- for package in setuptools.PEP420PackageFinder.find()
+ for package in setuptools.find_namespace_packages()
if package.startswith("google")
]
-namespaces = ["google"]
-if "google.cloud" in packages:
- namespaces.append("google.cloud")
-
setuptools.setup(
name=name,
version=version,
@@ -83,13 +84,13 @@
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Operating System :: OS Independent",
"Topic :: Internet",
],
platforms="Posix; MacOS X; Windows",
packages=packages,
python_requires=">=3.7",
- namespace_packages=namespaces,
install_requires=dependencies,
include_package_data=True,
zip_safe=False,
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
index ed7f9aed2..981d37ac6 100644
--- a/testing/constraints-3.10.txt
+++ b/testing/constraints-3.10.txt
@@ -2,5 +2,15 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
proto-plus
protobuf
+google-cloud-core
+google-cloud-appengine-logging
+google-cloud-audit-log
+grpc-google-iam-v1
+opentelemetry-api
+
+# optional dependencies
+django
+flask
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
index ed7f9aed2..981d37ac6 100644
--- a/testing/constraints-3.11.txt
+++ b/testing/constraints-3.11.txt
@@ -2,5 +2,15 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
proto-plus
protobuf
+google-cloud-core
+google-cloud-appengine-logging
+google-cloud-audit-log
+grpc-google-iam-v1
+opentelemetry-api
+
+# optional dependencies
+django
+flask
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
index ed7f9aed2..981d37ac6 100644
--- a/testing/constraints-3.12.txt
+++ b/testing/constraints-3.12.txt
@@ -2,5 +2,15 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
proto-plus
protobuf
+google-cloud-core
+google-cloud-appengine-logging
+google-cloud-audit-log
+grpc-google-iam-v1
+opentelemetry-api
+
+# optional dependencies
+django
+flask
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
new file mode 100644
index 000000000..981d37ac6
--- /dev/null
+++ b/testing/constraints-3.13.txt
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# This constraints file is required for unit tests.
+# List all library dependencies and extras in this file.
+google-api-core
+google-auth
+proto-plus
+protobuf
+google-cloud-core
+google-cloud-appengine-logging
+google-cloud-audit-log
+grpc-google-iam-v1
+opentelemetry-api
+
+# optional dependencies
+django
+flask
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index 587626c54..113004889 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -4,7 +4,22 @@
# Pin the version to the lower bound.
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
# Then this file should have google-cloud-foo==1.14.0
-google-api-core==1.33.2
+google-api-core==1.34.1
+google-auth==2.14.1
proto-plus==1.22.0
-protobuf==3.19.5
+protobuf==3.20.2
google-cloud-core==2.0.0
+google-cloud-appengine-logging==0.1.3
+google-cloud-audit-log==0.3.1
+grpc-google-iam-v1==0.12.4
+opentelemetry-api==1.9.0
+
+# Lower bound testing for optional dependencies
+django==3.2
+
+# Need specific versions of Flask dependencies for Flask 1.0 to work
+flask==1.0.0
+jinja2==2.10.1
+markupsafe==2.0.1
+itsdangerous==2.0.1
+werkzeug==1.0.1
\ No newline at end of file
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
index ed7f9aed2..76b620077 100644
--- a/testing/constraints-3.8.txt
+++ b/testing/constraints-3.8.txt
@@ -1,6 +1,22 @@
# -*- coding: utf-8 -*-
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
+google-api-core==2.14.0
+google-auth==2.14.1
+proto-plus==1.22.0
+protobuf==4.21.6
+google-cloud-core==2.0.0
+google-cloud-appengine-logging==0.1.3
+google-cloud-audit-log==0.3.1
+grpc-google-iam-v1==0.12.4
+opentelemetry-api==1.9.0
+
+# Lower bound testing for optional dependencies
+django==3.2
+
+# Need specific versions of Flask dependencies for Flask 1.0 to work
+flask==1.0.0
+jinja2==2.10.1
+markupsafe==2.0.1
+itsdangerous==2.0.1
+werkzeug==1.0.1
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
index ed7f9aed2..10c5cba87 100644
--- a/testing/constraints-3.9.txt
+++ b/testing/constraints-3.9.txt
@@ -2,5 +2,21 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
proto-plus
protobuf
+google-cloud-core
+google-cloud-appengine-logging
+google-cloud-audit-log
+grpc-google-iam-v1
+opentelemetry-api==1.9.0
+
+# Lower bound testing for optional dependencies
+django==3.2
+
+# Need specific versions of Flask dependencies for Flask 1.0 to work
+flask==1.0.0
+jinja2==2.10.1
+markupsafe==2.0.1
+itsdangerous==2.0.1
+werkzeug==1.0.1
diff --git a/tests/__init__.py b/tests/__init__.py
index 89a37dc92..cbf94b283 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index 8d39408d3..487ecde62 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -19,6 +19,7 @@
import numbers
import os
import pytest
+import sys
import unittest
import uuid
@@ -33,6 +34,7 @@
import google.cloud.logging
from google.cloud._helpers import UTC
from google.cloud.logging_v2.handlers import CloudLoggingHandler
+from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport
from google.cloud.logging_v2.handlers.transports import SyncTransport
from google.cloud.logging_v2 import client
from google.cloud.logging_v2.resource import Resource
@@ -117,8 +119,26 @@ def setUpModule():
)
-class TestLogging(unittest.TestCase):
+def _cleanup_otel_sdk_modules(f):
+ """
+ Decorator to delete all references to opentelemetry SDK modules after a
+ testcase is run. Test case should import opentelemetry SDK modules inside
+ the function. This is to test situations where the opentelemetry SDK
+ is not imported at all.
+ """
+
+ def wrapped(*args, **kwargs):
+ f(*args, **kwargs)
+
+ # Deleting from sys.modules should be good enough in this use case
+ for module_name in list(sys.modules.keys()):
+ if module_name.startswith("opentelemetry.sdk"):
+ sys.modules.pop(module_name)
+
+ return wrapped
+
+class TestLogging(unittest.TestCase):
JSON_PAYLOAD = {
"message": "System test: test_log_struct",
"weather": {
@@ -193,6 +213,7 @@ def test_list_entry_with_auditlog(self):
"methodName": "test",
"resourceName": "test",
"serviceName": "test",
+ "requestMetadata": {"callerIp": "127.0.0.1"},
}
audit_struct = self._dict_to_struct(audit_dict)
@@ -224,6 +245,12 @@ def test_list_entry_with_auditlog(self):
protobuf_entry.to_api_repr()["protoPayload"]["methodName"],
audit_dict["methodName"],
)
+ self.assertEqual(
+ protobuf_entry.to_api_repr()["protoPayload"]["requestMetadata"][
+ "callerIp"
+ ],
+ audit_dict["requestMetadata"]["callerIp"],
+ )
def test_list_entry_with_requestlog(self):
"""
@@ -330,7 +357,7 @@ def test_log_text_with_timestamp(self):
text_payload = "System test: test_log_text_with_timestamp"
gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts"))
http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http"))
- now = datetime.utcnow()
+ now = datetime.now(timezone.utc)
loggers = (
[gapic_logger]
if Config.use_mtls == "always"
@@ -350,7 +377,7 @@ def test_log_text_with_resource(self):
gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res"))
http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http"))
- now = datetime.utcnow()
+ now = datetime.now(timezone.utc)
loggers = (
[gapic_logger]
if Config.use_mtls == "always"
@@ -596,10 +623,10 @@ def test_handlers_w_extras(self):
"trace_sampled": True,
"http_request": expected_request,
"source_location": expected_source,
- "resource": Resource(type="cloudiot_device", labels={}),
+ "resource": Resource(type="global", labels={}),
"labels": {"test-label": "manual"},
}
- cloud_logger.warn(LOG_MESSAGE, extra=extra)
+ cloud_logger.warning(LOG_MESSAGE, extra=extra)
entries = _list_entries(logger)
self.assertEqual(len(entries), 1)
@@ -628,7 +655,7 @@ def test_handlers_w_json_fields(self):
cloud_logger = logging.getLogger(LOGGER_NAME)
cloud_logger.addHandler(handler)
extra = {"json_fields": {"hello": "world", "two": 2}}
- cloud_logger.warn(LOG_MESSAGE, extra=extra)
+ cloud_logger.warning(LOG_MESSAGE, extra=extra)
entries = _list_entries(logger)
self.assertEqual(len(entries), 1)
@@ -656,6 +683,109 @@ def test_log_root_handler(self):
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0].payload, expected_payload)
+ @_cleanup_otel_sdk_modules
+ def test_log_handler_otel_integration(self):
+ # Doing OTel imports here to not taint the other tests with OTel SDK imports
+ from opentelemetry import trace
+ from opentelemetry.sdk.trace import TracerProvider
+
+ LOG_MESSAGE = "This is a test of OpenTelemetry"
+ LOGGER_NAME = "otel-integration"
+ handler_name = self._logger_name(LOGGER_NAME)
+
+ handler = CloudLoggingHandler(
+ Config.CLIENT, name=handler_name, transport=SyncTransport
+ )
+ # only create the logger to delete, hidden otherwise
+ logger = Config.CLIENT.logger(handler.name)
+ self.to_delete.append(logger)
+
+ # Set up OTel SDK
+ provider = TracerProvider()
+
+ tracer = provider.get_tracer("test_system")
+ with tracer.start_as_current_span("test-span") as span:
+ context = span.get_span_context()
+ expected_trace_id = f"projects/{Config.CLIENT.project}/traces/{trace.format_trace_id(context.trace_id)}"
+ expected_span_id = trace.format_span_id(context.span_id)
+ expected_tracesampled = context.trace_flags.sampled
+
+ cloud_logger = logging.getLogger(LOGGER_NAME)
+ cloud_logger.addHandler(handler)
+ cloud_logger.warning(LOG_MESSAGE)
+
+ entries = _list_entries(logger)
+ self.assertEqual(len(entries), 1)
+ self.assertEqual(entries[0].trace, expected_trace_id)
+ self.assertEqual(entries[0].span_id, expected_span_id)
+ self.assertTrue(entries[0].trace_sampled, expected_tracesampled)
+
+ def test_log_handler_close(self):
+ from multiprocessing import Process
+
+ LOG_MESSAGE = "This is a test of handler.close before exiting."
+ LOGGER_NAME = "close-test"
+ handler_name = self._logger_name(LOGGER_NAME)
+
+ # only create the logger to delete, hidden otherwise
+ logger = Config.CLIENT.logger(handler_name)
+ self.to_delete.append(logger)
+
+ # Run a simulation of logging an entry then immediately shutting down.
+ # The .close() function before the process exits should prevent the
+ # thread shutdown error and let us log the message.
+ def subprocess_main():
+ # logger.delete and logger.list_entries work by filtering on log name, so we
+ # can create new objects with the same name and have the queries on the parent
+ # process still work.
+ handler = CloudLoggingHandler(
+ Config.CLIENT, name=handler_name, transport=BackgroundThreadTransport
+ )
+ cloud_logger = logging.getLogger(LOGGER_NAME)
+ cloud_logger.addHandler(handler)
+ cloud_logger.warning(LOG_MESSAGE)
+ handler.close()
+
+ proc = Process(target=subprocess_main)
+ proc.start()
+ proc.join()
+ entries = _list_entries(logger)
+ self.assertEqual(len(entries), 1)
+ self.assertEqual(entries[0].payload, LOG_MESSAGE)
+
+ def test_log_client_flush_handlers(self):
+ from multiprocessing import Process
+
+ LOG_MESSAGE = "This is a test of client.flush_handlers before exiting."
+ LOGGER_NAME = "close-test"
+ handler_name = self._logger_name(LOGGER_NAME)
+
+ # only create the logger to delete, hidden otherwise
+ logger = Config.CLIENT.logger(handler_name)
+ self.to_delete.append(logger)
+
+        # Run a simulation of logging an entry then immediately shutting down.
+        # Calling client.flush_handlers() before the process exits should prevent
+        # the thread shutdown error and let us log the message.
+ def subprocess_main():
+ # logger.delete and logger.list_entries work by filtering on log name, so we
+ # can create new objects with the same name and have the queries on the parent
+ # process still work.
+ handler = CloudLoggingHandler(
+ Config.CLIENT, name=handler_name, transport=BackgroundThreadTransport
+ )
+ cloud_logger = logging.getLogger(LOGGER_NAME)
+ cloud_logger.addHandler(handler)
+ cloud_logger.warning(LOG_MESSAGE)
+ Config.CLIENT.flush_handlers()
+
+ proc = Process(target=subprocess_main)
+ proc.start()
+ proc.join()
+ entries = _list_entries(logger)
+ self.assertEqual(len(entries), 1)
+ self.assertEqual(entries[0].payload, LOG_MESSAGE)
+
def test_create_metric(self):
METRIC_NAME = "test-create-metric%s" % (_RESOURCE_ID,)
metric = Config.CLIENT.metric(
@@ -807,7 +937,7 @@ def _init_bigquery_dataset(self):
# Stackdriver Logging to write into it.
retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable))
bigquery_client = bigquery.Client()
- dataset_ref = bigquery_client.dataset(dataset_name)
+ dataset_ref = bigquery.DatasetReference(Config.CLIENT.project, dataset_name)
dataset = retry(bigquery_client.create_dataset)(bigquery.Dataset(dataset_ref))
self.to_delete.append((bigquery_client, dataset))
bigquery_client.get_dataset(dataset)
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
index 89a37dc92..cbf94b283 100644
--- a/tests/unit/__init__.py
+++ b/tests/unit/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py
index 89a37dc92..cbf94b283 100644
--- a/tests/unit/gapic/__init__.py
+++ b/tests/unit/gapic/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/logging_v2/__init__.py b/tests/unit/gapic/logging_v2/__init__.py
index 89a37dc92..cbf94b283 100644
--- a/tests/unit/gapic/logging_v2/__init__.py
+++ b/tests/unit/gapic/logging_v2/__init__.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py
index 1af573470..73a8f5d32 100644
--- a/tests/unit/gapic/logging_v2/test_config_service_v2.py
+++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,11 +24,20 @@
import grpc
from grpc.experimental import aio
+import json
import math
import pytest
+from google.api_core import api_core_version
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
@@ -39,6 +48,7 @@
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
+from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.logging_v2.services.config_service_v2 import (
@@ -56,10 +66,32 @@
import google.auth
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -71,6 +103,17 @@ def modify_default_endpoint(client):
)
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+ return (
+ "test.{UNIVERSE_DOMAIN}"
+ if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE)
+ else client._DEFAULT_ENDPOINT_TEMPLATE
+ )
+
+
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
@@ -100,6 +143,237 @@ def test__get_default_mtls_endpoint():
)
+def test__read_environment_variables():
+ assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ assert ConfigServiceV2Client._read_environment_variables() == (
+ True,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+ assert ConfigServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError) as excinfo:
+ ConfigServiceV2Client._read_environment_variables()
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ assert ConfigServiceV2Client._read_environment_variables() == (
+ False,
+ "never",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ assert ConfigServiceV2Client._read_environment_variables() == (
+ False,
+ "always",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+ assert ConfigServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ ConfigServiceV2Client._read_environment_variables()
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+ assert ConfigServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ "foo.com",
+ )
+
+
+def test__get_client_cert_source():
+ mock_provided_cert_source = mock.Mock()
+ mock_default_cert_source = mock.Mock()
+
+ assert ConfigServiceV2Client._get_client_cert_source(None, False) is None
+ assert (
+ ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False)
+ is None
+ )
+ assert (
+ ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True)
+ == mock_provided_cert_source
+ )
+
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source", return_value=True
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=mock_default_cert_source,
+ ):
+ assert (
+ ConfigServiceV2Client._get_client_cert_source(None, True)
+ is mock_default_cert_source
+ )
+ assert (
+ ConfigServiceV2Client._get_client_cert_source(
+ mock_provided_cert_source, "true"
+ )
+ is mock_provided_cert_source
+ )
+
+
+@mock.patch.object(
+ ConfigServiceV2Client,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2Client),
+)
+@mock.patch.object(
+ ConfigServiceV2AsyncClient,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2AsyncClient),
+)
+def test__get_api_endpoint():
+ api_override = "foo.com"
+ mock_client_cert_source = mock.Mock()
+ default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE
+ default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=default_universe
+ )
+ mock_universe = "bar.com"
+ mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=mock_universe
+ )
+
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(
+ api_override, mock_client_cert_source, default_universe, "always"
+ )
+ == api_override
+ )
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, default_universe, "auto"
+ )
+ == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto")
+ == default_endpoint
+ )
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always")
+ == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, default_universe, "always"
+ )
+ == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never")
+ == mock_endpoint
+ )
+ assert (
+ ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never")
+ == default_endpoint
+ )
+
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ ConfigServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, mock_universe, "auto"
+ )
+ assert (
+ str(excinfo.value)
+ == "mTLS is not supported in any universe other than googleapis.com."
+ )
+
+
+def test__get_universe_domain():
+ client_universe_domain = "foo.com"
+ universe_domain_env = "bar.com"
+
+ assert (
+ ConfigServiceV2Client._get_universe_domain(
+ client_universe_domain, universe_domain_env
+ )
+ == client_universe_domain
+ )
+ assert (
+ ConfigServiceV2Client._get_universe_domain(None, universe_domain_env)
+ == universe_domain_env
+ )
+ assert (
+ ConfigServiceV2Client._get_universe_domain(None, None)
+ == ConfigServiceV2Client._DEFAULT_UNIVERSE
+ )
+
+ with pytest.raises(ValueError) as excinfo:
+ ConfigServiceV2Client._get_universe_domain("", None)
+ assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+
+@pytest.mark.parametrize(
+ "error_code,cred_info_json,show_cred_info",
+ [
+ (401, CRED_INFO_JSON, True),
+ (403, CRED_INFO_JSON, True),
+ (404, CRED_INFO_JSON, True),
+ (500, CRED_INFO_JSON, False),
+ (401, None, False),
+ (403, None, False),
+ (404, None, False),
+ (500, None, False),
+ ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+ cred = mock.Mock(["get_cred_info"])
+ cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+ client = ConfigServiceV2Client(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ if show_cred_info:
+ assert error.details == ["foo", CRED_INFO_STRING]
+ else:
+ assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+ cred = mock.Mock([])
+ assert not hasattr(cred, "get_cred_info")
+ client = ConfigServiceV2Client(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=[])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ assert error.details == []
+
+
@pytest.mark.parametrize(
"client_class,transport_name",
[
@@ -202,13 +476,13 @@ def test_config_service_v2_client_get_transport_class():
)
@mock.patch.object(
ConfigServiceV2Client,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(ConfigServiceV2Client),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2Client),
)
@mock.patch.object(
ConfigServiceV2AsyncClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(ConfigServiceV2AsyncClient),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2AsyncClient),
)
def test_config_service_v2_client_client_options(
client_class, transport_class, transport_name
@@ -250,7 +524,9 @@ def test_config_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -280,15 +556,23 @@ def test_config_service_v2_client_client_options(
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
client = client_class(transport=transport_name)
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
- with pytest.raises(ValueError):
+ with pytest.raises(ValueError) as excinfo:
client = client_class(transport=transport_name)
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
@@ -298,7 +582,9 @@ def test_config_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
@@ -316,7 +602,9 @@ def test_config_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -357,13 +645,13 @@ def test_config_service_v2_client_client_options(
)
@mock.patch.object(
ConfigServiceV2Client,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(ConfigServiceV2Client),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2Client),
)
@mock.patch.object(
ConfigServiceV2AsyncClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(ConfigServiceV2AsyncClient),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2AsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_config_service_v2_client_mtls_env_auto(
@@ -386,7 +674,9 @@ def test_config_service_v2_client_mtls_env_auto(
if use_client_cert_env == "false":
expected_client_cert_source = None
- expected_host = client.DEFAULT_ENDPOINT
+ expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ )
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -418,7 +708,9 @@ def test_config_service_v2_client_mtls_env_auto(
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
- expected_host = client.DEFAULT_ENDPOINT
+ expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ )
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -452,7 +744,9 @@ def test_config_service_v2_client_mtls_env_auto(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -542,6 +836,115 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ client_class.get_mtls_endpoint_and_cert_source()
+
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError) as excinfo:
+ client_class.get_mtls_endpoint_and_cert_source()
+
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient]
+)
+@mock.patch.object(
+ ConfigServiceV2Client,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2Client),
+)
+@mock.patch.object(
+ ConfigServiceV2AsyncClient,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(ConfigServiceV2AsyncClient),
+)
+def test_config_service_v2_client_client_api_endpoint(client_class):
+ mock_client_cert_source = client_cert_source_callback
+ api_override = "foo.com"
+ default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE
+ default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=default_universe
+ )
+ mock_universe = "bar.com"
+ mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=mock_universe
+ )
+
+ # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+ # use ClientOptions.api_endpoint as the api endpoint regardless.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=mock_client_cert_source, api_endpoint=api_override
+ )
+ client = client_class(
+ client_options=options,
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ assert client.api_endpoint == api_override
+
+ # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == default_endpoint
+
+ # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+ # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ client = client_class(credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+ # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+ # and ClientOptions.universe_domain="bar.com",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+ options = client_options.ClientOptions()
+ universe_exists = hasattr(options, "universe_domain")
+ if universe_exists:
+ options = client_options.ClientOptions(universe_domain=mock_universe)
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ else:
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert client.api_endpoint == (
+ mock_endpoint if universe_exists else default_endpoint
+ )
+ assert client.universe_domain == (
+ mock_universe if universe_exists else default_universe
+ )
+
+ # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ options = client_options.ClientOptions()
+ if hasattr(options, "universe_domain"):
+ delattr(options, "universe_domain")
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert client.api_endpoint == default_endpoint
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -567,7 +970,9 @@ def test_config_service_v2_client_client_options_scopes(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -606,7 +1011,9 @@ def test_config_service_v2_client_client_options_credentials_file(
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -666,7 +1073,9 @@ def test_config_service_v2_client_create_channel_credentials_file(
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -737,27 +1146,119 @@ def test_list_buckets(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListBucketsRequest()
+ request = logging_config.ListBucketsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListBucketsPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_buckets_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_buckets_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.ListBucketsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
- client.list_buckets()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_buckets(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListBucketsRequest()
+ assert args[0] == logging_config.ListBucketsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_buckets_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_buckets in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc
+ request = {}
+ client.list_buckets(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_buckets(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_buckets
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_buckets
+ ] = mock_rpc
+
+ request = {}
+ await client.list_buckets(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_buckets(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -765,7 +1266,7 @@ async def test_list_buckets_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -786,7 +1287,8 @@ async def test_list_buckets_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListBucketsRequest()
+ request = logging_config.ListBucketsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListBucketsAsyncPager)
@@ -830,7 +1332,7 @@ def test_list_buckets_field_headers():
@pytest.mark.asyncio
async def test_list_buckets_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -900,7 +1402,7 @@ def test_list_buckets_flattened_error():
@pytest.mark.asyncio
async def test_list_buckets_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -929,7 +1431,7 @@ async def test_list_buckets_flattened_async():
@pytest.mark.asyncio
async def test_list_buckets_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -943,7 +1445,7 @@ async def test_list_buckets_flattened_error_async():
def test_list_buckets_pager(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -978,13 +1480,17 @@ def test_list_buckets_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_buckets(request={})
+ pager = client.list_buckets(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -993,7 +1499,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"):
def test_list_buckets_pages(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -1035,7 +1541,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_buckets_async_pager():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1085,7 +1591,7 @@ async def test_list_buckets_async_pager():
@pytest.mark.asyncio
async def test_list_buckets_async_pages():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1165,7 +1671,8 @@ def test_get_bucket(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetBucketRequest()
+ request = logging_config.GetBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogBucket)
@@ -1178,44 +1685,131 @@ def test_get_bucket(request_type, transport: str = "grpc"):
assert response.restricted_fields == ["restricted_fields_value"]
-def test_get_bucket_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_bucket_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetBucketRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
- client.get_bucket()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_bucket(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetBucketRequest()
+ assert args[0] == logging_config.GetBucketRequest(
+ name="name_value",
+ )
-@pytest.mark.asyncio
-async def test_get_bucket_async(
- transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest
-):
- client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+def test_get_bucket_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- logging_config.LogBucket(
- name="name_value",
- description="description_value",
- retention_days=1512,
- locked=True,
+ # Ensure method has been cached
+ assert client._transport.get_bucket in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc
+ request = {}
+ client.get_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_bucket
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_bucket
+ ] = mock_rpc
+
+ request = {}
+ await client.get_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_bucket_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogBucket(
+ name="name_value",
+ description="description_value",
+ retention_days=1512,
+ locked=True,
lifecycle_state=logging_config.LifecycleState.ACTIVE,
analytics_enabled=True,
restricted_fields=["restricted_fields_value"],
@@ -1226,7 +1820,8 @@ async def test_get_bucket_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetBucketRequest()
+ request = logging_config.GetBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogBucket)
@@ -1276,7 +1871,7 @@ def test_get_bucket_field_headers():
@pytest.mark.asyncio
async def test_get_bucket_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1333,28 +1928,134 @@ def test_create_bucket_async(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateBucketRequest()
+ request = logging_config.CreateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
-def test_create_bucket_async_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_bucket_async_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CreateBucketRequest(
+ parent="parent_value",
+ bucket_id="bucket_id_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_bucket_async), "__call__"
) as call:
- client.create_bucket_async()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_bucket_async(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateBucketRequest()
+ assert args[0] == logging_config.CreateBucketRequest(
+ parent="parent_value",
+ bucket_id="bucket_id_value",
+ )
+
+
+def test_create_bucket_async_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._transport.create_bucket_async in client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.create_bucket_async
+ ] = mock_rpc
+ request = {}
+ client.create_bucket_async(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ client.create_bucket_async(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_bucket_async_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_bucket_async
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_bucket_async
+ ] = mock_rpc
+
+ request = {}
+ await client.create_bucket_async(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ await client.create_bucket_async(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1362,7 +2063,7 @@ async def test_create_bucket_async_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1383,7 +2084,8 @@ async def test_create_bucket_async_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateBucketRequest()
+ request = logging_config.CreateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@@ -1428,7 +2130,7 @@ def test_create_bucket_async_field_headers():
@pytest.mark.asyncio
async def test_create_bucket_async_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1487,28 +2189,132 @@ def test_update_bucket_async(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateBucketRequest()
+ request = logging_config.UpdateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
-def test_update_bucket_async_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_bucket_async_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateBucketRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_bucket_async), "__call__"
) as call:
- client.update_bucket_async()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_bucket_async(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateBucketRequest()
+ assert args[0] == logging_config.UpdateBucketRequest(
+ name="name_value",
+ )
+
+
+def test_update_bucket_async_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._transport.update_bucket_async in client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.update_bucket_async
+ ] = mock_rpc
+ request = {}
+ client.update_bucket_async(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ client.update_bucket_async(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_bucket_async_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_bucket_async
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_bucket_async
+ ] = mock_rpc
+
+ request = {}
+ await client.update_bucket_async(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ await client.update_bucket_async(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1516,7 +2322,7 @@ async def test_update_bucket_async_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1537,7 +2343,8 @@ async def test_update_bucket_async_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateBucketRequest()
+ request = logging_config.UpdateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@@ -1582,7 +2389,7 @@ def test_update_bucket_async_field_headers():
@pytest.mark.asyncio
async def test_update_bucket_async_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1647,7 +2454,8 @@ def test_create_bucket(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateBucketRequest()
+ request = logging_config.CreateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogBucket)
@@ -1660,20 +2468,111 @@ def test_create_bucket(request_type, transport: str = "grpc"):
assert response.restricted_fields == ["restricted_fields_value"]
-def test_create_bucket_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_bucket_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CreateBucketRequest(
+ parent="parent_value",
+ bucket_id="bucket_id_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_bucket), "__call__") as call:
- client.create_bucket()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_bucket(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateBucketRequest()
+ assert args[0] == logging_config.CreateBucketRequest(
+ parent="parent_value",
+ bucket_id="bucket_id_value",
+ )
+
+
+def test_create_bucket_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_bucket in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc
+ request = {}
+ client.create_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.create_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_bucket_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_bucket
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_bucket
+ ] = mock_rpc
+
+ request = {}
+ await client.create_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1681,7 +2580,7 @@ async def test_create_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1708,7 +2607,8 @@ async def test_create_bucket_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateBucketRequest()
+ request = logging_config.CreateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogBucket)
@@ -1758,7 +2658,7 @@ def test_create_bucket_field_headers():
@pytest.mark.asyncio
async def test_create_bucket_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1821,7 +2721,8 @@ def test_update_bucket(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateBucketRequest()
+ request = logging_config.UpdateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogBucket)
@@ -1834,20 +2735,109 @@ def test_update_bucket(request_type, transport: str = "grpc"):
assert response.restricted_fields == ["restricted_fields_value"]
-def test_update_bucket_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_bucket_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateBucketRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
- client.update_bucket()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_bucket(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateBucketRequest()
+ assert args[0] == logging_config.UpdateBucketRequest(
+ name="name_value",
+ )
+
+
+def test_update_bucket_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_bucket in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc
+ request = {}
+ client.update_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_bucket_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_bucket
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_bucket
+ ] = mock_rpc
+
+ request = {}
+ await client.update_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1855,7 +2845,7 @@ async def test_update_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1882,7 +2872,8 @@ async def test_update_bucket_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateBucketRequest()
+ request = logging_config.UpdateBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogBucket)
@@ -1932,7 +2923,7 @@ def test_update_bucket_field_headers():
@pytest.mark.asyncio
async def test_update_bucket_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1987,54 +2978,145 @@ def test_delete_bucket(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteBucketRequest()
+ request = logging_config.DeleteBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_delete_bucket_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_delete_bucket_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.DeleteBucketRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
- client.delete_bucket()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_bucket(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteBucketRequest()
+ assert args[0] == logging_config.DeleteBucketRequest(
+ name="name_value",
+ )
-@pytest.mark.asyncio
-async def test_delete_bucket_async(
- transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest
-):
- client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+def test_delete_bucket_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- response = await client.delete_bucket(request)
+ # Ensure method has been cached
+ assert client._transport.delete_bucket in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc
+ request = {}
+ client.delete_bucket(request)
# Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteBucketRequest()
+ assert mock_rpc.call_count == 1
- # Establish that the response is the type that we expect.
- assert response is None
+ client.delete_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_bucket_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_bucket
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_bucket
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_bucket_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ response = await client.delete_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = logging_config.DeleteBucketRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
@pytest.mark.asyncio
@@ -2074,7 +3156,7 @@ def test_delete_bucket_field_headers():
@pytest.mark.asyncio
async def test_delete_bucket_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2127,26 +3209,116 @@ def test_undelete_bucket(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UndeleteBucketRequest()
+ request = logging_config.UndeleteBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_undelete_bucket_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_undelete_bucket_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UndeleteBucketRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call:
- client.undelete_bucket()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.undelete_bucket(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UndeleteBucketRequest()
+ assert args[0] == logging_config.UndeleteBucketRequest(
+ name="name_value",
+ )
+
+
+def test_undelete_bucket_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.undelete_bucket in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc
+ request = {}
+ client.undelete_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.undelete_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_undelete_bucket_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.undelete_bucket
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.undelete_bucket
+ ] = mock_rpc
+
+ request = {}
+ await client.undelete_bucket(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.undelete_bucket(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -2154,7 +3326,7 @@ async def test_undelete_bucket_async(
transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2171,7 +3343,8 @@ async def test_undelete_bucket_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UndeleteBucketRequest()
+ request = logging_config.UndeleteBucketRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@@ -2214,7 +3387,7 @@ def test_undelete_bucket_field_headers():
@pytest.mark.asyncio
async def test_undelete_bucket_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2269,27 +3442,117 @@ def test_list_views(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListViewsRequest()
+ request = logging_config.ListViewsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListViewsPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_views_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_views_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.ListViewsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_views), "__call__") as call:
- client.list_views()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_views(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListViewsRequest()
+ assert args[0] == logging_config.ListViewsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_views_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_views in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.list_views] = mock_rpc
+ request = {}
+ client.list_views(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_views(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_views
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_views
+ ] = mock_rpc
+
+ request = {}
+ await client.list_views(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_views(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -2297,7 +3560,7 @@ async def test_list_views_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2318,7 +3581,8 @@ async def test_list_views_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListViewsRequest()
+ request = logging_config.ListViewsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListViewsAsyncPager)
@@ -2362,7 +3626,7 @@ def test_list_views_field_headers():
@pytest.mark.asyncio
async def test_list_views_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2432,7 +3696,7 @@ def test_list_views_flattened_error():
@pytest.mark.asyncio
async def test_list_views_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2461,7 +3725,7 @@ async def test_list_views_flattened_async():
@pytest.mark.asyncio
async def test_list_views_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2475,7 +3739,7 @@ async def test_list_views_flattened_error_async():
def test_list_views_pager(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -2510,13 +3774,17 @@ def test_list_views_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_views(request={})
+ pager = client.list_views(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -2525,7 +3793,7 @@ def test_list_views_pager(transport_name: str = "grpc"):
def test_list_views_pages(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -2567,7 +3835,7 @@ def test_list_views_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_views_async_pager():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2617,7 +3885,7 @@ async def test_list_views_async_pager():
@pytest.mark.asyncio
async def test_list_views_async_pages():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2693,7 +3961,8 @@ def test_get_view(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetViewRequest()
+ request = logging_config.GetViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogView)
@@ -2702,20 +3971,107 @@ def test_get_view(request_type, transport: str = "grpc"):
assert response.filter == "filter_value"
-def test_get_view_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_view_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetViewRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_view), "__call__") as call:
- client.get_view()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_view(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetViewRequest()
+ assert args[0] == logging_config.GetViewRequest(
+ name="name_value",
+ )
+
+
+def test_get_view_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_view in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_view] = mock_rpc
+ request = {}
+ client.get_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_view
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_view
+ ] = mock_rpc
+
+ request = {}
+ await client.get_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -2723,7 +4079,7 @@ async def test_get_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2746,7 +4102,8 @@ async def test_get_view_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetViewRequest()
+ request = logging_config.GetViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogView)
@@ -2792,7 +4149,7 @@ def test_get_view_field_headers():
@pytest.mark.asyncio
async def test_get_view_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2851,7 +4208,8 @@ def test_create_view(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateViewRequest()
+ request = logging_config.CreateViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogView)
@@ -2860,20 +4218,111 @@ def test_create_view(request_type, transport: str = "grpc"):
assert response.filter == "filter_value"
-def test_create_view_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_view_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CreateViewRequest(
+ parent="parent_value",
+ view_id="view_id_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_view), "__call__") as call:
- client.create_view()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_view(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateViewRequest()
+ assert args[0] == logging_config.CreateViewRequest(
+ parent="parent_value",
+ view_id="view_id_value",
+ )
+
+
+def test_create_view_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_view in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.create_view] = mock_rpc
+ request = {}
+ client.create_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.create_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_view_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_view
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_view
+ ] = mock_rpc
+
+ request = {}
+ await client.create_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -2881,7 +4330,7 @@ async def test_create_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2904,7 +4353,8 @@ async def test_create_view_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateViewRequest()
+ request = logging_config.CreateViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogView)
@@ -2950,7 +4400,7 @@ def test_create_view_field_headers():
@pytest.mark.asyncio
async def test_create_view_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3009,7 +4459,8 @@ def test_update_view(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateViewRequest()
+ request = logging_config.UpdateViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogView)
@@ -3018,20 +4469,109 @@ def test_update_view(request_type, transport: str = "grpc"):
assert response.filter == "filter_value"
-def test_update_view_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_view_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateViewRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_view), "__call__") as call:
- client.update_view()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_view(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateViewRequest()
+ assert args[0] == logging_config.UpdateViewRequest(
+ name="name_value",
+ )
+
+
+def test_update_view_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_view in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.update_view] = mock_rpc
+ request = {}
+ client.update_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_view_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_view
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_view
+ ] = mock_rpc
+
+ request = {}
+ await client.update_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -3039,7 +4579,7 @@ async def test_update_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3062,7 +4602,8 @@ async def test_update_view_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateViewRequest()
+ request = logging_config.UpdateViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogView)
@@ -3108,7 +4649,7 @@ def test_update_view_field_headers():
@pytest.mark.asyncio
async def test_update_view_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3163,26 +4704,116 @@ def test_delete_view(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteViewRequest()
+ request = logging_config.DeleteViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_delete_view_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_delete_view_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.DeleteViewRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_view), "__call__") as call:
- client.delete_view()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_view(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteViewRequest()
+ assert args[0] == logging_config.DeleteViewRequest(
+ name="name_value",
+ )
+
+
+def test_delete_view_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_view in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc
+ request = {}
+ client.delete_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_view_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_view
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_view
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_view(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_view(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -3190,7 +4821,7 @@ async def test_delete_view_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3207,7 +4838,8 @@ async def test_delete_view_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteViewRequest()
+ request = logging_config.DeleteViewRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@@ -3250,7 +4882,7 @@ def test_delete_view_field_headers():
@pytest.mark.asyncio
async def test_delete_view_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3305,27 +4937,117 @@ def test_list_sinks(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListSinksRequest()
+ request = logging_config.ListSinksRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListSinksPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_sinks_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_sinks_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.ListSinksRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
- client.list_sinks()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_sinks(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListSinksRequest()
+ assert args[0] == logging_config.ListSinksRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_sinks_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_sinks in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc
+ request = {}
+ client.list_sinks(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_sinks(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_sinks
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_sinks
+ ] = mock_rpc
+
+ request = {}
+ await client.list_sinks(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_sinks(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -3333,7 +5055,7 @@ async def test_list_sinks_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3354,7 +5076,8 @@ async def test_list_sinks_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListSinksRequest()
+ request = logging_config.ListSinksRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListSinksAsyncPager)
@@ -3398,7 +5121,7 @@ def test_list_sinks_field_headers():
@pytest.mark.asyncio
async def test_list_sinks_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3468,7 +5191,7 @@ def test_list_sinks_flattened_error():
@pytest.mark.asyncio
async def test_list_sinks_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3497,7 +5220,7 @@ async def test_list_sinks_flattened_async():
@pytest.mark.asyncio
async def test_list_sinks_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3511,7 +5234,7 @@ async def test_list_sinks_flattened_error_async():
def test_list_sinks_pager(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -3546,13 +5269,17 @@ def test_list_sinks_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_sinks(request={})
+ pager = client.list_sinks(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -3561,7 +5288,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"):
def test_list_sinks_pages(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -3603,7 +5330,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_sinks_async_pager():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3653,7 +5380,7 @@ async def test_list_sinks_async_pager():
@pytest.mark.asyncio
async def test_list_sinks_async_pages():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3734,7 +5461,8 @@ def test_get_sink(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetSinkRequest()
+ request = logging_config.GetSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogSink)
@@ -3748,20 +5476,107 @@ def test_get_sink(request_type, transport: str = "grpc"):
assert response.include_children is True
-def test_get_sink_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_sink_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetSinkRequest(
+ sink_name="sink_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
- client.get_sink()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_sink(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetSinkRequest()
+ assert args[0] == logging_config.GetSinkRequest(
+ sink_name="sink_name_value",
+ )
+
+
+def test_get_sink_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_sink in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc
+ request = {}
+ client.get_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_sink
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_sink
+ ] = mock_rpc
+
+ request = {}
+ await client.get_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -3769,7 +5584,7 @@ async def test_get_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3797,7 +5612,8 @@ async def test_get_sink_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetSinkRequest()
+ request = logging_config.GetSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogSink)
@@ -3848,7 +5664,7 @@ def test_get_sink_field_headers():
@pytest.mark.asyncio
async def test_get_sink_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3918,7 +5734,7 @@ def test_get_sink_flattened_error():
@pytest.mark.asyncio
async def test_get_sink_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -3947,7 +5763,7 @@ async def test_get_sink_flattened_async():
@pytest.mark.asyncio
async def test_get_sink_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -3994,7 +5810,8 @@ def test_create_sink(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateSinkRequest()
+ request = logging_config.CreateSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogSink)
@@ -4008,20 +5825,109 @@ def test_create_sink(request_type, transport: str = "grpc"):
assert response.include_children is True
-def test_create_sink_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_sink_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CreateSinkRequest(
+ parent="parent_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
- client.create_sink()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_sink(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateSinkRequest()
+ assert args[0] == logging_config.CreateSinkRequest(
+ parent="parent_value",
+ )
+
+
+def test_create_sink_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_sink in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc
+ request = {}
+ client.create_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.create_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_sink_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_sink
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_sink
+ ] = mock_rpc
+
+ request = {}
+ await client.create_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -4029,7 +5935,7 @@ async def test_create_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -4057,7 +5963,8 @@ async def test_create_sink_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateSinkRequest()
+ request = logging_config.CreateSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogSink)
@@ -4108,7 +6015,7 @@ def test_create_sink_field_headers():
@pytest.mark.asyncio
async def test_create_sink_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4183,7 +6090,7 @@ def test_create_sink_flattened_error():
@pytest.mark.asyncio
async def test_create_sink_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4216,7 +6123,7 @@ async def test_create_sink_flattened_async():
@pytest.mark.asyncio
async def test_create_sink_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -4264,7 +6171,8 @@ def test_update_sink(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateSinkRequest()
+ request = logging_config.UpdateSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogSink)
@@ -4278,20 +6186,109 @@ def test_update_sink(request_type, transport: str = "grpc"):
assert response.include_children is True
-def test_update_sink_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_sink_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateSinkRequest(
+ sink_name="sink_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
- client.update_sink()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_sink(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateSinkRequest()
+ assert args[0] == logging_config.UpdateSinkRequest(
+ sink_name="sink_name_value",
+ )
+
+
+def test_update_sink_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_sink in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc
+ request = {}
+ client.update_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_sink_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_sink
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_sink
+ ] = mock_rpc
+
+ request = {}
+ await client.update_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -4299,7 +6296,7 @@ async def test_update_sink_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -4327,7 +6324,8 @@ async def test_update_sink_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateSinkRequest()
+ request = logging_config.UpdateSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogSink)
@@ -4378,7 +6376,7 @@ def test_update_sink_field_headers():
@pytest.mark.asyncio
async def test_update_sink_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4458,7 +6456,7 @@ def test_update_sink_flattened_error():
@pytest.mark.asyncio
async def test_update_sink_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4495,7 +6493,7 @@ async def test_update_sink_flattened_async():
@pytest.mark.asyncio
async def test_update_sink_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -4535,63 +6533,154 @@ def test_delete_sink(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteSinkRequest()
+ request = logging_config.DeleteSinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_delete_sink_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_delete_sink_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.DeleteSinkRequest(
+ sink_name="sink_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
- client.delete_sink()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_sink(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteSinkRequest()
+ assert args[0] == logging_config.DeleteSinkRequest(
+ sink_name="sink_name_value",
+ )
-@pytest.mark.asyncio
-async def test_delete_sink_async(
- transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest
-):
- client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+def test_delete_sink_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
- response = await client.delete_sink(request)
+ # Ensure method has been cached
+ assert client._transport.delete_sink in client._transport._wrapped_methods
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteSinkRequest()
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc
+ request = {}
+ client.delete_sink(request)
- # Establish that the response is the type that we expect.
- assert response is None
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+ client.delete_sink(request)
-@pytest.mark.asyncio
-async def test_delete_sink_async_from_dict():
- await test_delete_sink_async(request_type=dict)
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
-def test_delete_sink_field_headers():
- client = ConfigServiceV2Client(
+@pytest.mark.asyncio
+async def test_delete_sink_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_sink
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_sink
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_sink(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_sink_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ response = await client.delete_sink(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = logging_config.DeleteSinkRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_sink_async_from_dict():
+ await test_delete_sink_async(request_type=dict)
+
+
+def test_delete_sink_field_headers():
+ client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
)
@@ -4622,7 +6711,7 @@ def test_delete_sink_field_headers():
@pytest.mark.asyncio
async def test_delete_sink_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4690,7 +6779,7 @@ def test_delete_sink_flattened_error():
@pytest.mark.asyncio
async def test_delete_sink_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4717,7 +6806,7 @@ async def test_delete_sink_flattened_async():
@pytest.mark.asyncio
async def test_delete_sink_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -4755,26 +6844,128 @@ def test_create_link(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateLinkRequest()
+ request = logging_config.CreateLinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
-def test_create_link_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_link_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CreateLinkRequest(
+ parent="parent_value",
+ link_id="link_id_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_link), "__call__") as call:
- client.create_link()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_link(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateLinkRequest()
+ assert args[0] == logging_config.CreateLinkRequest(
+ parent="parent_value",
+ link_id="link_id_value",
+ )
+
+
+def test_create_link_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_link in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.create_link] = mock_rpc
+ request = {}
+ client.create_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ client.create_link(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_link_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_link
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_link
+ ] = mock_rpc
+
+ request = {}
+ await client.create_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ await client.create_link(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -4782,7 +6973,7 @@ async def test_create_link_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateLinkRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -4801,7 +6992,8 @@ async def test_create_link_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateLinkRequest()
+ request = logging_config.CreateLinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@@ -4844,7 +7036,7 @@ def test_create_link_field_headers():
@pytest.mark.asyncio
async def test_create_link_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -4924,7 +7116,7 @@ def test_create_link_flattened_error():
@pytest.mark.asyncio
async def test_create_link_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -4961,7 +7153,7 @@ async def test_create_link_flattened_async():
@pytest.mark.asyncio
async def test_create_link_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -5001,26 +7193,126 @@ def test_delete_link(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteLinkRequest()
+ request = logging_config.DeleteLinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
-def test_delete_link_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_delete_link_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.DeleteLinkRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_link), "__call__") as call:
- client.delete_link()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_link(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteLinkRequest()
+ assert args[0] == logging_config.DeleteLinkRequest(
+ name="name_value",
+ )
+
+
+def test_delete_link_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_link in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc
+ request = {}
+ client.delete_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ client.delete_link(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_link_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_link
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_link
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ await client.delete_link(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -5028,7 +7320,7 @@ async def test_delete_link_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -5047,7 +7339,8 @@ async def test_delete_link_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteLinkRequest()
+ request = logging_config.DeleteLinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@@ -5090,7 +7383,7 @@ def test_delete_link_field_headers():
@pytest.mark.asyncio
async def test_delete_link_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5160,7 +7453,7 @@ def test_delete_link_flattened_error():
@pytest.mark.asyncio
async def test_delete_link_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -5189,7 +7482,7 @@ async def test_delete_link_flattened_async():
@pytest.mark.asyncio
async def test_delete_link_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -5229,27 +7522,117 @@ def test_list_links(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListLinksRequest()
+ request = logging_config.ListLinksRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLinksPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_links_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_links_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.ListLinksRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_links), "__call__") as call:
- client.list_links()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_links(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListLinksRequest()
+ assert args[0] == logging_config.ListLinksRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_links_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_links in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.list_links] = mock_rpc
+ request = {}
+ client.list_links(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_links(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_links
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_links
+ ] = mock_rpc
+
+ request = {}
+ await client.list_links(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_links(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -5257,7 +7640,7 @@ async def test_list_links_async(
transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -5278,7 +7661,8 @@ async def test_list_links_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListLinksRequest()
+ request = logging_config.ListLinksRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLinksAsyncPager)
@@ -5322,7 +7706,7 @@ def test_list_links_field_headers():
@pytest.mark.asyncio
async def test_list_links_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5392,7 +7776,7 @@ def test_list_links_flattened_error():
@pytest.mark.asyncio
async def test_list_links_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -5421,7 +7805,7 @@ async def test_list_links_flattened_async():
@pytest.mark.asyncio
async def test_list_links_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -5435,7 +7819,7 @@ async def test_list_links_flattened_error_async():
def test_list_links_pager(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -5470,13 +7854,17 @@ def test_list_links_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_links(request={})
+ pager = client.list_links(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -5485,7 +7873,7 @@ def test_list_links_pager(transport_name: str = "grpc"):
def test_list_links_pages(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -5527,7 +7915,7 @@ def test_list_links_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_links_async_pager():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -5577,7 +7965,7 @@ async def test_list_links_async_pager():
@pytest.mark.asyncio
async def test_list_links_async_pages():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -5653,7 +8041,8 @@ def test_get_link(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetLinkRequest()
+ request = logging_config.GetLinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.Link)
@@ -5662,20 +8051,107 @@ def test_get_link(request_type, transport: str = "grpc"):
assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
-def test_get_link_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_link_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetLinkRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_link), "__call__") as call:
- client.get_link()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_link(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetLinkRequest()
+ assert args[0] == logging_config.GetLinkRequest(
+ name="name_value",
+ )
+
+
+def test_get_link_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_link in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_link] = mock_rpc
+ request = {}
+ client.get_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_link(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_link
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_link
+ ] = mock_rpc
+
+ request = {}
+ await client.get_link(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_link(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -5683,7 +8159,7 @@ async def test_get_link_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -5706,7 +8182,8 @@ async def test_get_link_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetLinkRequest()
+ request = logging_config.GetLinkRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.Link)
@@ -5752,7 +8229,7 @@ def test_get_link_field_headers():
@pytest.mark.asyncio
async def test_get_link_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -5820,7 +8297,7 @@ def test_get_link_flattened_error():
@pytest.mark.asyncio
async def test_get_link_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -5847,7 +8324,7 @@ async def test_get_link_flattened_async():
@pytest.mark.asyncio
async def test_get_link_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -5887,44 +8364,136 @@ def test_list_exclusions(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListExclusionsRequest()
+ request = logging_config.ListExclusionsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListExclusionsPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_exclusions_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_exclusions_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.ListExclusionsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
- client.list_exclusions()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_exclusions(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListExclusionsRequest()
+ assert args[0] == logging_config.ListExclusionsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
-@pytest.mark.asyncio
-async def test_list_exclusions_async(
- transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest
-):
- client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+def test_list_exclusions_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ # Ensure method has been cached
+ assert client._transport.list_exclusions in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc
+ request = {}
+ client.list_exclusions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_exclusions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_exclusions
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_exclusions
+ ] = mock_rpc
+
+ request = {}
+ await client.list_exclusions(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_exclusions(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_exclusions_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
logging_config.ListExclusionsResponse(
@@ -5936,7 +8505,8 @@ async def test_list_exclusions_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.ListExclusionsRequest()
+ request = logging_config.ListExclusionsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListExclusionsAsyncPager)
@@ -5980,7 +8550,7 @@ def test_list_exclusions_field_headers():
@pytest.mark.asyncio
async def test_list_exclusions_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -6050,7 +8620,7 @@ def test_list_exclusions_flattened_error():
@pytest.mark.asyncio
async def test_list_exclusions_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -6079,7 +8649,7 @@ async def test_list_exclusions_flattened_async():
@pytest.mark.asyncio
async def test_list_exclusions_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -6093,7 +8663,7 @@ async def test_list_exclusions_flattened_error_async():
def test_list_exclusions_pager(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -6128,13 +8698,17 @@ def test_list_exclusions_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_exclusions(request={})
+ pager = client.list_exclusions(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -6143,7 +8717,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"):
def test_list_exclusions_pages(transport_name: str = "grpc"):
client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -6185,7 +8759,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_exclusions_async_pager():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -6235,7 +8809,7 @@ async def test_list_exclusions_async_pager():
@pytest.mark.asyncio
async def test_list_exclusions_async_pages():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -6312,7 +8886,8 @@ def test_get_exclusion(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetExclusionRequest()
+ request = logging_config.GetExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogExclusion)
@@ -6322,20 +8897,109 @@ def test_get_exclusion(request_type, transport: str = "grpc"):
assert response.disabled is True
-def test_get_exclusion_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_exclusion_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetExclusionRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
- client.get_exclusion()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_exclusion(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetExclusionRequest()
+ assert args[0] == logging_config.GetExclusionRequest(
+ name="name_value",
+ )
+
+
+def test_get_exclusion_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_exclusion in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc
+ request = {}
+ client.get_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_exclusion_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_exclusion
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_exclusion
+ ] = mock_rpc
+
+ request = {}
+ await client.get_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -6343,7 +9007,7 @@ async def test_get_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -6367,7 +9031,8 @@ async def test_get_exclusion_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetExclusionRequest()
+ request = logging_config.GetExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogExclusion)
@@ -6414,7 +9079,7 @@ def test_get_exclusion_field_headers():
@pytest.mark.asyncio
async def test_get_exclusion_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -6484,7 +9149,7 @@ def test_get_exclusion_flattened_error():
@pytest.mark.asyncio
async def test_get_exclusion_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -6513,7 +9178,7 @@ async def test_get_exclusion_flattened_async():
@pytest.mark.asyncio
async def test_get_exclusion_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -6556,7 +9221,8 @@ def test_create_exclusion(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateExclusionRequest()
+ request = logging_config.CreateExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogExclusion)
@@ -6566,20 +9232,111 @@ def test_create_exclusion(request_type, transport: str = "grpc"):
assert response.disabled is True
-def test_create_exclusion_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_exclusion_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CreateExclusionRequest(
+ parent="parent_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
- client.create_exclusion()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_exclusion(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateExclusionRequest()
+ assert args[0] == logging_config.CreateExclusionRequest(
+ parent="parent_value",
+ )
+
+
+def test_create_exclusion_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_exclusion in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.create_exclusion
+ ] = mock_rpc
+ request = {}
+ client.create_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.create_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_exclusion_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_exclusion
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_exclusion
+ ] = mock_rpc
+
+ request = {}
+ await client.create_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -6587,7 +9344,7 @@ async def test_create_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -6611,7 +9368,8 @@ async def test_create_exclusion_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CreateExclusionRequest()
+ request = logging_config.CreateExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogExclusion)
@@ -6658,7 +9416,7 @@ def test_create_exclusion_field_headers():
@pytest.mark.asyncio
async def test_create_exclusion_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -6733,7 +9491,7 @@ def test_create_exclusion_flattened_error():
@pytest.mark.asyncio
async def test_create_exclusion_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -6766,7 +9524,7 @@ async def test_create_exclusion_flattened_async():
@pytest.mark.asyncio
async def test_create_exclusion_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -6810,7 +9568,8 @@ def test_update_exclusion(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateExclusionRequest()
+ request = logging_config.UpdateExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogExclusion)
@@ -6820,20 +9579,111 @@ def test_update_exclusion(request_type, transport: str = "grpc"):
assert response.disabled is True
-def test_update_exclusion_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_exclusion_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateExclusionRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call:
- client.update_exclusion()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_exclusion(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateExclusionRequest()
+ assert args[0] == logging_config.UpdateExclusionRequest(
+ name="name_value",
+ )
+
+
+def test_update_exclusion_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_exclusion in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.update_exclusion
+ ] = mock_rpc
+ request = {}
+ client.update_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_exclusion_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_exclusion
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_exclusion
+ ] = mock_rpc
+
+ request = {}
+ await client.update_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -6841,7 +9691,7 @@ async def test_update_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -6865,7 +9715,8 @@ async def test_update_exclusion_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateExclusionRequest()
+ request = logging_config.UpdateExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.LogExclusion)
@@ -6912,7 +9763,7 @@ def test_update_exclusion_field_headers():
@pytest.mark.asyncio
async def test_update_exclusion_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -6992,7 +9843,7 @@ def test_update_exclusion_flattened_error():
@pytest.mark.asyncio
async def test_update_exclusion_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -7029,7 +9880,7 @@ async def test_update_exclusion_flattened_async():
@pytest.mark.asyncio
async def test_update_exclusion_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -7069,26 +9920,118 @@ def test_delete_exclusion(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteExclusionRequest()
+ request = logging_config.DeleteExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_delete_exclusion_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_delete_exclusion_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.DeleteExclusionRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call:
- client.delete_exclusion()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_exclusion(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteExclusionRequest()
+ assert args[0] == logging_config.DeleteExclusionRequest(
+ name="name_value",
+ )
+
+
+def test_delete_exclusion_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_exclusion in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.delete_exclusion
+ ] = mock_rpc
+ request = {}
+ client.delete_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_exclusion_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_exclusion
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_exclusion
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_exclusion(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_exclusion(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -7096,7 +10039,7 @@ async def test_delete_exclusion_async(
transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7113,7 +10056,8 @@ async def test_delete_exclusion_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.DeleteExclusionRequest()
+ request = logging_config.DeleteExclusionRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@@ -7156,7 +10100,7 @@ def test_delete_exclusion_field_headers():
@pytest.mark.asyncio
async def test_delete_exclusion_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7224,7 +10168,7 @@ def test_delete_exclusion_flattened_error():
@pytest.mark.asyncio
async def test_delete_exclusion_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -7251,7 +10195,7 @@ async def test_delete_exclusion_flattened_async():
@pytest.mark.asyncio
async def test_delete_exclusion_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -7296,7 +10240,8 @@ def test_get_cmek_settings(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetCmekSettingsRequest()
+ request = logging_config.GetCmekSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.CmekSettings)
@@ -7306,35 +10251,126 @@ def test_get_cmek_settings(request_type, transport: str = "grpc"):
assert response.service_account_id == "service_account_id_value"
-def test_get_cmek_settings_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_cmek_settings_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetCmekSettingsRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.get_cmek_settings), "__call__"
) as call:
- client.get_cmek_settings()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_cmek_settings(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetCmekSettingsRequest()
+ assert args[0] == logging_config.GetCmekSettingsRequest(
+ name="name_value",
+ )
+
+
+def test_get_cmek_settings_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_cmek_settings in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.get_cmek_settings
+ ] = mock_rpc
+ request = {}
+ client.get_cmek_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_cmek_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
-async def test_get_cmek_settings_async(
- transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest
+async def test_get_cmek_settings_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
):
- client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_cmek_settings
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_cmek_settings
+ ] = mock_rpc
+
+ request = {}
+ await client.get_cmek_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_cmek_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_cmek_settings_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
@@ -7355,7 +10391,8 @@ async def test_get_cmek_settings_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetCmekSettingsRequest()
+ request = logging_config.GetCmekSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.CmekSettings)
@@ -7404,7 +10441,7 @@ def test_get_cmek_settings_field_headers():
@pytest.mark.asyncio
async def test_get_cmek_settings_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7468,7 +10505,8 @@ def test_update_cmek_settings(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateCmekSettingsRequest()
+ request = logging_config.UpdateCmekSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.CmekSettings)
@@ -7478,22 +10516,115 @@ def test_update_cmek_settings(request_type, transport: str = "grpc"):
assert response.service_account_id == "service_account_id_value"
-def test_update_cmek_settings_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_cmek_settings_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateCmekSettingsRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_cmek_settings), "__call__"
) as call:
- client.update_cmek_settings()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_cmek_settings(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateCmekSettingsRequest()
+ assert args[0] == logging_config.UpdateCmekSettingsRequest(
+ name="name_value",
+ )
+
+
+def test_update_cmek_settings_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._transport.update_cmek_settings in client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.update_cmek_settings
+ ] = mock_rpc
+ request = {}
+ client.update_cmek_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_cmek_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_cmek_settings_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_cmek_settings
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_cmek_settings
+ ] = mock_rpc
+
+ request = {}
+ await client.update_cmek_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_cmek_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -7502,7 +10633,7 @@ async def test_update_cmek_settings_async(
request_type=logging_config.UpdateCmekSettingsRequest,
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7528,7 +10659,8 @@ async def test_update_cmek_settings_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateCmekSettingsRequest()
+ request = logging_config.UpdateCmekSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.CmekSettings)
@@ -7577,7 +10709,7 @@ def test_update_cmek_settings_field_headers():
@pytest.mark.asyncio
async def test_update_cmek_settings_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7640,7 +10772,8 @@ def test_get_settings(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetSettingsRequest()
+ request = logging_config.GetSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.Settings)
@@ -7651,20 +10784,109 @@ def test_get_settings(request_type, transport: str = "grpc"):
assert response.disable_default_sink is True
-def test_get_settings_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_settings_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.GetSettingsRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_settings), "__call__") as call:
- client.get_settings()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_settings(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetSettingsRequest()
+ assert args[0] == logging_config.GetSettingsRequest(
+ name="name_value",
+ )
+
+
+def test_get_settings_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_settings in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc
+ request = {}
+ client.get_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_settings_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_settings
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_settings
+ ] = mock_rpc
+
+ request = {}
+ await client.get_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -7672,7 +10894,7 @@ async def test_get_settings_async(
transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7697,7 +10919,8 @@ async def test_get_settings_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.GetSettingsRequest()
+ request = logging_config.GetSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.Settings)
@@ -7745,7 +10968,7 @@ def test_get_settings_field_headers():
@pytest.mark.asyncio
async def test_get_settings_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -7815,7 +11038,7 @@ def test_get_settings_flattened_error():
@pytest.mark.asyncio
async def test_get_settings_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -7844,7 +11067,7 @@ async def test_get_settings_flattened_async():
@pytest.mark.asyncio
async def test_get_settings_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -7888,7 +11111,8 @@ def test_update_settings(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateSettingsRequest()
+ request = logging_config.UpdateSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.Settings)
@@ -7899,20 +11123,109 @@ def test_update_settings(request_type, transport: str = "grpc"):
assert response.disable_default_sink is True
-def test_update_settings_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_settings_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.UpdateSettingsRequest(
+ name="name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
- client.update_settings()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.update_settings(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateSettingsRequest()
+ assert args[0] == logging_config.UpdateSettingsRequest(
+ name="name_value",
+ )
+
+
+def test_update_settings_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_settings in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc
+ request = {}
+ client.update_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_settings_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_settings
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_settings
+ ] = mock_rpc
+
+ request = {}
+ await client.update_settings(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_settings(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -7920,7 +11233,7 @@ async def test_update_settings_async(
transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest
):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -7945,7 +11258,8 @@ async def test_update_settings_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.UpdateSettingsRequest()
+ request = logging_config.UpdateSettingsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_config.Settings)
@@ -7993,7 +11307,7 @@ def test_update_settings_field_headers():
@pytest.mark.asyncio
async def test_update_settings_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -8068,7 +11382,7 @@ def test_update_settings_flattened_error():
@pytest.mark.asyncio
async def test_update_settings_flattened_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -8101,7 +11415,7 @@ async def test_update_settings_flattened_async():
@pytest.mark.asyncio
async def test_update_settings_flattened_error_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -8127,178 +11441,1892 @@ def test_copy_log_entries(request_type, transport: str = "grpc"):
transport=transport,
)
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+ response = client.copy_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = logging_config.CopyLogEntriesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_copy_log_entries_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_config.CopyLogEntriesRequest(
+ name="name_value",
+ filter="filter_value",
+ destination="destination_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.copy_log_entries(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == logging_config.CopyLogEntriesRequest(
+ name="name_value",
+ filter="filter_value",
+ destination="destination_value",
+ )
+
+
+def test_copy_log_entries_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.copy_log_entries in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.copy_log_entries
+ ] = mock_rpc
+ request = {}
+ client.copy_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ client.copy_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_copy_log_entries_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.copy_log_entries
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.copy_log_entries
+ ] = mock_rpc
+
+ request = {}
+ await client.copy_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ # Operation methods call wrapper_fn to build a cached
+ # client._transport.operations_client instance on first rpc call.
+ # Subsequent calls should use the cached wrapper
+ wrapper_fn.reset_mock()
+
+ await client.copy_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_copy_log_entries_async(
+ transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest
+):
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ response = await client.copy_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ request = logging_config.CopyLogEntriesRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_copy_log_entries_async_from_dict():
+ await test_copy_log_entries_async(request_type=dict)
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.ConfigServiceV2GrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.ConfigServiceV2GrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ConfigServiceV2Client(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a transport instance.
+ transport = transports.ConfigServiceV2GrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = ConfigServiceV2Client(
+ client_options=options,
+ transport=transport,
+ )
+
+ # It is an error to provide an api_key and a credential.
+ options = client_options.ClientOptions()
+ options.api_key = "api_key"
+ with pytest.raises(ValueError):
+ client = ConfigServiceV2Client(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.ConfigServiceV2GrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = ConfigServiceV2Client(
+ client_options={"scopes": ["1", "2"]},
+ transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ConfigServiceV2GrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = ConfigServiceV2Client(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.ConfigServiceV2GrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.ConfigServiceV2GrpcAsyncIOTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ConfigServiceV2GrpcTransport,
+ transports.ConfigServiceV2GrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_kind_grpc():
+ transport = ConfigServiceV2Client.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_buckets_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
+ call.return_value = logging_config.ListBucketsResponse()
+ client.list_buckets(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListBucketsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_bucket_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
+ call.return_value = logging_config.LogBucket()
+ client.get_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_bucket_async_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_bucket_async), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.create_bucket_async(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_bucket_async_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_bucket_async), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.update_bucket_async(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_bucket_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_bucket), "__call__") as call:
+ call.return_value = logging_config.LogBucket()
+ client.create_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_bucket_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
+ call.return_value = logging_config.LogBucket()
+ client.update_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_bucket_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
+ call.return_value = None
+ client.delete_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_undelete_bucket_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call:
+ call.return_value = None
+ client.undelete_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UndeleteBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_views_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ call.return_value = logging_config.ListViewsResponse()
+ client.list_views(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListViewsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_view_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_view), "__call__") as call:
+ call.return_value = logging_config.LogView()
+ client.get_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_view_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_view), "__call__") as call:
+ call.return_value = logging_config.LogView()
+ client.create_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_view_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_view), "__call__") as call:
+ call.return_value = logging_config.LogView()
+ client.update_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_view_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call:
+ call.return_value = None
+ client.delete_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_sinks_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
+ call.return_value = logging_config.ListSinksResponse()
+ client.list_sinks(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListSinksRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_sink_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+ call.return_value = logging_config.LogSink()
+ client.get_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_sink_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
+ call.return_value = logging_config.LogSink()
+ client.create_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_sink_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+ call.return_value = logging_config.LogSink()
+ client.update_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_sink_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+ call.return_value = None
+ client.delete_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_link_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_link), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.create_link(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateLinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_link_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_link), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.delete_link(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteLinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_links_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_links), "__call__") as call:
+ call.return_value = logging_config.ListLinksResponse()
+ client.list_links(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListLinksRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_link_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_link), "__call__") as call:
+ call.return_value = logging_config.Link()
+ client.get_link(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetLinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_exclusions_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ call.return_value = logging_config.ListExclusionsResponse()
+ client.list_exclusions(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListExclusionsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_exclusion_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
+ call.return_value = logging_config.LogExclusion()
+ client.get_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetExclusionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_exclusion_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ call.return_value = logging_config.LogExclusion()
+ client.create_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateExclusionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_exclusion_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call:
+ call.return_value = logging_config.LogExclusion()
+ client.update_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateExclusionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_exclusion_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call:
+ call.return_value = None
+ client.delete_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteExclusionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_cmek_settings_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_cmek_settings), "__call__"
+ ) as call:
+ call.return_value = logging_config.CmekSettings()
+ client.get_cmek_settings(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetCmekSettingsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_cmek_settings_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_cmek_settings), "__call__"
+ ) as call:
+ call.return_value = logging_config.CmekSettings()
+ client.update_cmek_settings(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateCmekSettingsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_settings_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_settings), "__call__") as call:
+ call.return_value = logging_config.Settings()
+ client.get_settings(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetSettingsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_settings_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
+ call.return_value = logging_config.Settings()
+ client.update_settings(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateSettingsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_copy_log_entries_empty_call_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+ client.copy_log_entries(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CopyLogEntriesRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_buckets_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_buckets), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListBucketsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_buckets(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListBucketsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_bucket_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogBucket(
+ name="name_value",
+ description="description_value",
+ retention_days=1512,
+ locked=True,
+ lifecycle_state=logging_config.LifecycleState.ACTIVE,
+ analytics_enabled=True,
+ restricted_fields=["restricted_fields_value"],
+ )
+ )
+ await client.get_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_bucket_async_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_bucket_async), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.create_bucket_async(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_bucket_async_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_bucket_async), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.update_bucket_async(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_bucket_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogBucket(
+ name="name_value",
+ description="description_value",
+ retention_days=1512,
+ locked=True,
+ lifecycle_state=logging_config.LifecycleState.ACTIVE,
+ analytics_enabled=True,
+ restricted_fields=["restricted_fields_value"],
+ )
+ )
+ await client.create_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_bucket_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogBucket(
+ name="name_value",
+ description="description_value",
+ retention_days=1512,
+ locked=True,
+ lifecycle_state=logging_config.LifecycleState.ACTIVE,
+ analytics_enabled=True,
+ restricted_fields=["restricted_fields_value"],
+ )
+ )
+ await client.update_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_bucket_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_undelete_bucket_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.undelete_bucket(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UndeleteBucketRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_views_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_views), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListViewsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_views(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListViewsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_view_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_view), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogView(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ )
+ )
+ await client.get_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_view_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_view), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogView(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ )
+ )
+ await client.create_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_view_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_view), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogView(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ )
+ )
+ await client.update_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_view_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_view), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_view(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteViewRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_sinks_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListSinksResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_sinks(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListSinksRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_sink_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_sink), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogSink(
+ name="name_value",
+ destination="destination_value",
+ filter="filter_value",
+ description="description_value",
+ disabled=True,
+ output_version_format=logging_config.LogSink.VersionFormat.V2,
+ writer_identity="writer_identity_value",
+ include_children=True,
+ )
+ )
+ await client.get_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_sink_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_sink), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogSink(
+ name="name_value",
+ destination="destination_value",
+ filter="filter_value",
+ description="description_value",
+ disabled=True,
+ output_version_format=logging_config.LogSink.VersionFormat.V2,
+ writer_identity="writer_identity_value",
+ include_children=True,
+ )
+ )
+ await client.create_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_sink_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_sink), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogSink(
+ name="name_value",
+ destination="destination_value",
+ filter="filter_value",
+ description="description_value",
+ disabled=True,
+ output_version_format=logging_config.LogSink.VersionFormat.V2,
+ writer_identity="writer_identity_value",
+ include_children=True,
+ )
+ )
+ await client.update_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_sink_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_sink), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_sink(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteSinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_link_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_link), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.create_link(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateLinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_link_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_link), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.delete_link(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.DeleteLinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_links_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_links), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListLinksResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_links(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListLinksRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_link_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_link), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.Link(
+ name="name_value",
+ description="description_value",
+ lifecycle_state=logging_config.LifecycleState.ACTIVE,
+ )
+ )
+ await client.get_link(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetLinkRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_exclusions_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.ListExclusionsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_exclusions(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.ListExclusionsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_exclusion_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ disabled=True,
+ )
+ )
+ await client.get_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetExclusionRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_exclusion_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ disabled=True,
+ )
+ )
+ await client.create_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CreateExclusionRequest()
+
+ assert args[0] == request_msg
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_exclusion_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call:
# Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name="operations/spam")
- response = client.copy_log_entries(request)
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.LogExclusion(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ disabled=True,
+ )
+ )
+ await client.update_exclusion(request=None)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
+ # Establish that the underlying stub method was called.
+ call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CopyLogEntriesRequest()
+ request_msg = logging_config.UpdateExclusionRequest()
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
+ assert args[0] == request_msg
-def test_copy_log_entries_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_exclusion_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
- client.copy_log_entries()
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_exclusion(request=None)
+
+ # Establish that the underlying stub method was called.
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CopyLogEntriesRequest()
+ request_msg = logging_config.DeleteExclusionRequest()
+
+ assert args[0] == request_msg
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
-async def test_copy_log_entries_async(
- transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest
-):
+async def test_get_cmek_settings_empty_call_grpc_asyncio():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = request_type()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_cmek_settings), "__call__"
+ ) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
- operations_pb2.Operation(name="operations/spam")
+ logging_config.CmekSettings(
+ name="name_value",
+ kms_key_name="kms_key_name_value",
+ kms_key_version_name="kms_key_version_name_value",
+ service_account_id="service_account_id_value",
+ )
)
- response = await client.copy_log_entries(request)
+ await client.get_cmek_settings(request=None)
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls)
+ # Establish that the underlying stub method was called.
+ call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_config.CopyLogEntriesRequest()
+ request_msg = logging_config.GetCmekSettingsRequest()
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
+ assert args[0] == request_msg
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
-async def test_copy_log_entries_async_from_dict():
- await test_copy_log_entries_async(request_type=dict)
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.ConfigServiceV2GrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
+async def test_update_cmek_settings_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- with pytest.raises(ValueError):
- client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials(),
- transport=transport,
- )
- # It is an error to provide a credentials file and a transport instance.
- transport = transports.ConfigServiceV2GrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- with pytest.raises(ValueError):
- client = ConfigServiceV2Client(
- client_options={"credentials_file": "credentials.json"},
- transport=transport,
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_cmek_settings), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.CmekSettings(
+ name="name_value",
+ kms_key_name="kms_key_name_value",
+ kms_key_version_name="kms_key_version_name_value",
+ service_account_id="service_account_id_value",
+ )
)
+ await client.update_cmek_settings(request=None)
- # It is an error to provide an api_key and a transport instance.
- transport = transports.ConfigServiceV2GrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- options = client_options.ClientOptions()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ConfigServiceV2Client(
- client_options=options,
- transport=transport,
- )
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateCmekSettingsRequest()
- # It is an error to provide an api_key and a credential.
- options = mock.Mock()
- options.api_key = "api_key"
- with pytest.raises(ValueError):
- client = ConfigServiceV2Client(
- client_options=options, credentials=ga_credentials.AnonymousCredentials()
- )
+ assert args[0] == request_msg
- # It is an error to provide scopes and a transport instance.
- transport = transports.ConfigServiceV2GrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_settings_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- with pytest.raises(ValueError):
- client = ConfigServiceV2Client(
- client_options={"scopes": ["1", "2"]},
- transport=transport,
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_settings), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.Settings(
+ name="name_value",
+ kms_key_name="kms_key_name_value",
+ kms_service_account_id="kms_service_account_id_value",
+ storage_location="storage_location_value",
+ disable_default_sink=True,
+ )
)
+ await client.get_settings(request=None)
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.GetSettingsRequest()
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ConfigServiceV2GrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- client = ConfigServiceV2Client(transport=transport)
- assert client.transport is transport
+ assert args[0] == request_msg
-def test_transport_get_channel():
- # A client may be instantiated with a custom transport instance.
- transport = transports.ConfigServiceV2GrpcTransport(
- credentials=ga_credentials.AnonymousCredentials(),
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_settings_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- channel = transport.grpc_channel
- assert channel
- transport = transports.ConfigServiceV2GrpcAsyncIOTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
- channel = transport.grpc_channel
- assert channel
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.update_settings), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_config.Settings(
+ name="name_value",
+ kms_key_name="kms_key_name_value",
+ kms_service_account_id="kms_service_account_id_value",
+ storage_location="storage_location_value",
+ disable_default_sink=True,
+ )
+ )
+ await client.update_settings(request=None)
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.UpdateSettingsRequest()
-@pytest.mark.parametrize(
- "transport_class",
- [
- transports.ConfigServiceV2GrpcTransport,
- transports.ConfigServiceV2GrpcAsyncIOTransport,
- ],
-)
-def test_transport_adc(transport_class):
- # Test default credentials are used if not provided.
- with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- transport_class()
- adc.assert_called_once()
+ assert args[0] == request_msg
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- ],
-)
-def test_transport_kind(transport_name):
- transport = ConfigServiceV2Client.get_transport_class(transport_name)(
- credentials=ga_credentials.AnonymousCredentials(),
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_copy_log_entries_empty_call_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- assert transport.kind == transport_name
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ await client.copy_log_entries(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_config.CopyLogEntriesRequest()
+
+ assert args[0] == request_msg
def test_transport_grpc_default():
@@ -9073,20 +14101,6 @@ def test_client_with_default_client_info():
prep.assert_called_once_with(client_info)
-@pytest.mark.asyncio
-async def test_transport_close_async():
- client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
- with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
- ) as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
def test_cancel_operation(transport: str = "grpc"):
client = ConfigServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
@@ -9112,9 +14126,9 @@ def test_cancel_operation(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_cancel_operation_async(transport: str = "grpc"):
+async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -9167,7 +14181,7 @@ def test_cancel_operation_field_headers():
@pytest.mark.asyncio
async def test_cancel_operation_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -9212,7 +14226,7 @@ def test_cancel_operation_from_dict():
@pytest.mark.asyncio
async def test_cancel_operation_from_dict_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
@@ -9251,9 +14265,9 @@ def test_get_operation(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_get_operation_async(transport: str = "grpc"):
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -9308,7 +14322,7 @@ def test_get_operation_field_headers():
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -9355,7 +14369,7 @@ def test_get_operation_from_dict():
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
@@ -9396,9 +14410,9 @@ def test_list_operations(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_list_operations_async(transport: str = "grpc"):
+async def test_list_operations_async(transport: str = "grpc_asyncio"):
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -9453,7 +14467,7 @@ def test_list_operations_field_headers():
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -9500,7 +14514,7 @@ def test_list_operations_from_dict():
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
client = ConfigServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
@@ -9516,21 +14530,29 @@ async def test_list_operations_from_dict_async():
call.assert_called()
-def test_transport_close():
- transports = {
- "grpc": "_grpc_channel",
- }
+def test_transport_close_grpc():
+ client = ConfigServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
- for transport, close_name in transports.items():
- client = ConfigServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = ConfigServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
@@ -9570,7 +14592,9 @@ def test_api_key_credentials(client_class, transport_class):
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py
index ba5e56f22..ef3833740 100644
--- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py
+++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,11 +24,20 @@
import grpc
from grpc.experimental import aio
+import json
import math
import pytest
+from google.api_core import api_core_version
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api import monitored_resource_pb2 # type: ignore
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
@@ -36,6 +45,7 @@
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
+from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.logging_v2.services.logging_service_v2 import (
@@ -57,10 +67,32 @@
import google.auth
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -72,6 +104,17 @@ def modify_default_endpoint(client):
)
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+ return (
+ "test.{UNIVERSE_DOMAIN}"
+ if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE)
+ else client._DEFAULT_ENDPOINT_TEMPLATE
+ )
+
+
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
@@ -102,6 +145,237 @@ def test__get_default_mtls_endpoint():
)
+def test__read_environment_variables():
+ assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ assert LoggingServiceV2Client._read_environment_variables() == (
+ True,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+ assert LoggingServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError) as excinfo:
+ LoggingServiceV2Client._read_environment_variables()
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ assert LoggingServiceV2Client._read_environment_variables() == (
+ False,
+ "never",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ assert LoggingServiceV2Client._read_environment_variables() == (
+ False,
+ "always",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+ assert LoggingServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ LoggingServiceV2Client._read_environment_variables()
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+ assert LoggingServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ "foo.com",
+ )
+
+
+def test__get_client_cert_source():
+ mock_provided_cert_source = mock.Mock()
+ mock_default_cert_source = mock.Mock()
+
+ assert LoggingServiceV2Client._get_client_cert_source(None, False) is None
+ assert (
+ LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False)
+ is None
+ )
+ assert (
+ LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True)
+ == mock_provided_cert_source
+ )
+
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source", return_value=True
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=mock_default_cert_source,
+ ):
+ assert (
+ LoggingServiceV2Client._get_client_cert_source(None, True)
+ is mock_default_cert_source
+ )
+ assert (
+ LoggingServiceV2Client._get_client_cert_source(
+ mock_provided_cert_source, "true"
+ )
+ is mock_provided_cert_source
+ )
+
+
+@mock.patch.object(
+ LoggingServiceV2Client,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2Client),
+)
+@mock.patch.object(
+ LoggingServiceV2AsyncClient,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2AsyncClient),
+)
+def test__get_api_endpoint():
+ api_override = "foo.com"
+ mock_client_cert_source = mock.Mock()
+ default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE
+ default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=default_universe
+ )
+ mock_universe = "bar.com"
+ mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=mock_universe
+ )
+
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(
+ api_override, mock_client_cert_source, default_universe, "always"
+ )
+ == api_override
+ )
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, default_universe, "auto"
+ )
+ == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto")
+ == default_endpoint
+ )
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always")
+ == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, default_universe, "always"
+ )
+ == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never")
+ == mock_endpoint
+ )
+ assert (
+ LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "never")
+ == default_endpoint
+ )
+
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ LoggingServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, mock_universe, "auto"
+ )
+ assert (
+ str(excinfo.value)
+ == "mTLS is not supported in any universe other than googleapis.com."
+ )
+
+
+def test__get_universe_domain():
+ client_universe_domain = "foo.com"
+ universe_domain_env = "bar.com"
+
+ assert (
+ LoggingServiceV2Client._get_universe_domain(
+ client_universe_domain, universe_domain_env
+ )
+ == client_universe_domain
+ )
+ assert (
+ LoggingServiceV2Client._get_universe_domain(None, universe_domain_env)
+ == universe_domain_env
+ )
+ assert (
+ LoggingServiceV2Client._get_universe_domain(None, None)
+ == LoggingServiceV2Client._DEFAULT_UNIVERSE
+ )
+
+ with pytest.raises(ValueError) as excinfo:
+ LoggingServiceV2Client._get_universe_domain("", None)
+ assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+
+@pytest.mark.parametrize(
+ "error_code,cred_info_json,show_cred_info",
+ [
+ (401, CRED_INFO_JSON, True),
+ (403, CRED_INFO_JSON, True),
+ (404, CRED_INFO_JSON, True),
+ (500, CRED_INFO_JSON, False),
+ (401, None, False),
+ (403, None, False),
+ (404, None, False),
+ (500, None, False),
+ ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+ cred = mock.Mock(["get_cred_info"])
+ cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+ client = LoggingServiceV2Client(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ if show_cred_info:
+ assert error.details == ["foo", CRED_INFO_STRING]
+ else:
+ assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+ cred = mock.Mock([])
+ assert not hasattr(cred, "get_cred_info")
+ client = LoggingServiceV2Client(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=[])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ assert error.details == []
+
+
@pytest.mark.parametrize(
"client_class,transport_name",
[
@@ -204,13 +478,13 @@ def test_logging_service_v2_client_get_transport_class():
)
@mock.patch.object(
LoggingServiceV2Client,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(LoggingServiceV2Client),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2Client),
)
@mock.patch.object(
LoggingServiceV2AsyncClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(LoggingServiceV2AsyncClient),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2AsyncClient),
)
def test_logging_service_v2_client_client_options(
client_class, transport_class, transport_name
@@ -252,7 +526,9 @@ def test_logging_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -282,15 +558,23 @@ def test_logging_service_v2_client_client_options(
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
client = client_class(transport=transport_name)
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
- with pytest.raises(ValueError):
+ with pytest.raises(ValueError) as excinfo:
client = client_class(transport=transport_name)
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
@@ -300,7 +584,9 @@ def test_logging_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
@@ -318,7 +604,9 @@ def test_logging_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -359,13 +647,13 @@ def test_logging_service_v2_client_client_options(
)
@mock.patch.object(
LoggingServiceV2Client,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(LoggingServiceV2Client),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2Client),
)
@mock.patch.object(
LoggingServiceV2AsyncClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(LoggingServiceV2AsyncClient),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2AsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_logging_service_v2_client_mtls_env_auto(
@@ -388,7 +676,9 @@ def test_logging_service_v2_client_mtls_env_auto(
if use_client_cert_env == "false":
expected_client_cert_source = None
- expected_host = client.DEFAULT_ENDPOINT
+ expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ )
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -420,7 +710,9 @@ def test_logging_service_v2_client_mtls_env_auto(
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
- expected_host = client.DEFAULT_ENDPOINT
+ expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ )
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -454,7 +746,9 @@ def test_logging_service_v2_client_mtls_env_auto(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -544,6 +838,115 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ client_class.get_mtls_endpoint_and_cert_source()
+
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError) as excinfo:
+ client_class.get_mtls_endpoint_and_cert_source()
+
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient]
+)
+@mock.patch.object(
+ LoggingServiceV2Client,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2Client),
+)
+@mock.patch.object(
+ LoggingServiceV2AsyncClient,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(LoggingServiceV2AsyncClient),
+)
+def test_logging_service_v2_client_client_api_endpoint(client_class):
+ mock_client_cert_source = client_cert_source_callback
+ api_override = "foo.com"
+ default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE
+ default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=default_universe
+ )
+ mock_universe = "bar.com"
+ mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=mock_universe
+ )
+
+ # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+ # use ClientOptions.api_endpoint as the api endpoint regardless.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=mock_client_cert_source, api_endpoint=api_override
+ )
+ client = client_class(
+ client_options=options,
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ assert client.api_endpoint == api_override
+
+ # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == default_endpoint
+
+ # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+ # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ client = client_class(credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+ # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+ # and ClientOptions.universe_domain="bar.com",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+ options = client_options.ClientOptions()
+ universe_exists = hasattr(options, "universe_domain")
+ if universe_exists:
+ options = client_options.ClientOptions(universe_domain=mock_universe)
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ else:
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert client.api_endpoint == (
+ mock_endpoint if universe_exists else default_endpoint
+ )
+ assert client.universe_domain == (
+ mock_universe if universe_exists else default_universe
+ )
+
+ # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ options = client_options.ClientOptions()
+ if hasattr(options, "universe_domain"):
+ delattr(options, "universe_domain")
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert client.api_endpoint == default_endpoint
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -569,7 +972,9 @@ def test_logging_service_v2_client_client_options_scopes(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -608,7 +1013,9 @@ def test_logging_service_v2_client_client_options_credentials_file(
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -668,7 +1075,9 @@ def test_logging_service_v2_client_create_channel_credentials_file(
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -738,26 +1147,114 @@ def test_delete_log(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.DeleteLogRequest()
+ request = logging.DeleteLogRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_delete_log_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_delete_log_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging.DeleteLogRequest(
+ log_name="log_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
- client.delete_log()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_log(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.DeleteLogRequest()
+ assert args[0] == logging.DeleteLogRequest(
+ log_name="log_name_value",
+ )
+
+
+def test_delete_log_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_log in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc
+ request = {}
+ client.delete_log(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_log(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_log
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_log
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_log(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_log(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -765,7 +1262,7 @@ async def test_delete_log_async(
transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest
):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -782,7 +1279,8 @@ async def test_delete_log_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.DeleteLogRequest()
+ request = logging.DeleteLogRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@@ -825,7 +1323,7 @@ def test_delete_log_field_headers():
@pytest.mark.asyncio
async def test_delete_log_field_headers_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -893,7 +1391,7 @@ def test_delete_log_flattened_error():
@pytest.mark.asyncio
async def test_delete_log_flattened_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -920,7 +1418,7 @@ async def test_delete_log_flattened_async():
@pytest.mark.asyncio
async def test_delete_log_flattened_error_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -960,28 +1458,120 @@ def test_write_log_entries(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.WriteLogEntriesRequest()
+ request = logging.WriteLogEntriesRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging.WriteLogEntriesResponse)
-def test_write_log_entries_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_write_log_entries_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging.WriteLogEntriesRequest(
+ log_name="log_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.write_log_entries), "__call__"
) as call:
- client.write_log_entries()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.write_log_entries(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.WriteLogEntriesRequest()
+ assert args[0] == logging.WriteLogEntriesRequest(
+ log_name="log_name_value",
+ )
+
+
+def test_write_log_entries_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.write_log_entries in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.write_log_entries
+ ] = mock_rpc
+ request = {}
+ client.write_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.write_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_write_log_entries_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.write_log_entries
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.write_log_entries
+ ] = mock_rpc
+
+ request = {}
+ await client.write_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.write_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -989,7 +1579,7 @@ async def test_write_log_entries_async(
transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest
):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1010,7 +1600,8 @@ async def test_write_log_entries_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.WriteLogEntriesRequest()
+ request = logging.WriteLogEntriesRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging.WriteLogEntriesResponse)
@@ -1079,7 +1670,7 @@ def test_write_log_entries_flattened_error():
@pytest.mark.asyncio
async def test_write_log_entries_flattened_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1122,7 +1713,7 @@ async def test_write_log_entries_flattened_async():
@pytest.mark.asyncio
async def test_write_log_entries_flattened_error_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1165,27 +1756,123 @@ def test_list_log_entries(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListLogEntriesRequest()
+ request = logging.ListLogEntriesRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLogEntriesPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_log_entries_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_log_entries_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging.ListLogEntriesRequest(
+ filter="filter_value",
+ order_by="order_by_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
- client.list_log_entries()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_log_entries(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListLogEntriesRequest()
+ assert args[0] == logging.ListLogEntriesRequest(
+ filter="filter_value",
+ order_by="order_by_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_log_entries_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_log_entries in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.list_log_entries
+ ] = mock_rpc
+ request = {}
+ client.list_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_log_entries_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_log_entries
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_log_entries
+ ] = mock_rpc
+
+ request = {}
+ await client.list_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1193,7 +1880,7 @@ async def test_list_log_entries_async(
transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest
):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1214,7 +1901,8 @@ async def test_list_log_entries_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListLogEntriesRequest()
+ request = logging.ListLogEntriesRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLogEntriesAsyncPager)
@@ -1277,7 +1965,7 @@ def test_list_log_entries_flattened_error():
@pytest.mark.asyncio
async def test_list_log_entries_flattened_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1314,7 +2002,7 @@ async def test_list_log_entries_flattened_async():
@pytest.mark.asyncio
async def test_list_log_entries_flattened_error_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1330,7 +2018,7 @@ async def test_list_log_entries_flattened_error_async():
def test_list_log_entries_pager(transport_name: str = "grpc"):
client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -1365,10 +2053,14 @@ def test_list_log_entries_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- pager = client.list_log_entries(request={})
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ pager = client.list_log_entries(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -1377,7 +2069,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"):
def test_list_log_entries_pages(transport_name: str = "grpc"):
client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -1419,7 +2111,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_log_entries_async_pager():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1469,7 +2161,7 @@ async def test_list_log_entries_async_pager():
@pytest.mark.asyncio
async def test_list_log_entries_async_pages():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1545,29 +2237,124 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = "grp
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListMonitoredResourceDescriptorsRequest()
+ request = logging.ListMonitoredResourceDescriptorsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_monitored_resource_descriptors_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging.ListMonitoredResourceDescriptorsRequest(
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_monitored_resource_descriptors), "__call__"
) as call:
- client.list_monitored_resource_descriptors()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_monitored_resource_descriptors(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListMonitoredResourceDescriptorsRequest()
+ assert args[0] == logging.ListMonitoredResourceDescriptorsRequest(
+ page_token="page_token_value",
+ )
+
+
+def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._transport.list_monitored_resource_descriptors
+ in client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.list_monitored_resource_descriptors
+ ] = mock_rpc
+ request = {}
+ client.list_monitored_resource_descriptors(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_monitored_resource_descriptors(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_monitored_resource_descriptors
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_monitored_resource_descriptors
+ ] = mock_rpc
+
+ request = {}
+ await client.list_monitored_resource_descriptors(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_monitored_resource_descriptors(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1576,7 +2363,7 @@ async def test_list_monitored_resource_descriptors_async(
request_type=logging.ListMonitoredResourceDescriptorsRequest,
):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1599,7 +2386,8 @@ async def test_list_monitored_resource_descriptors_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListMonitoredResourceDescriptorsRequest()
+ request = logging.ListMonitoredResourceDescriptorsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager)
@@ -1613,7 +2401,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict():
def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"):
client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -1650,10 +2438,16 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc")
RuntimeError,
)
- metadata = ()
- pager = client.list_monitored_resource_descriptors(request={})
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ pager = client.list_monitored_resource_descriptors(
+ request={}, retry=retry, timeout=timeout
+ )
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -1665,7 +2459,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc")
def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"):
client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -1709,7 +2503,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc")
@pytest.mark.asyncio
async def test_list_monitored_resource_descriptors_async_pager():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1764,7 +2558,7 @@ async def test_list_monitored_resource_descriptors_async_pager():
@pytest.mark.asyncio
async def test_list_monitored_resource_descriptors_async_pages():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1841,7 +2635,8 @@ def test_list_logs(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListLogsRequest()
+ request = logging.ListLogsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLogsPager)
@@ -1849,20 +2644,109 @@ def test_list_logs(request_type, transport: str = "grpc"):
assert response.next_page_token == "next_page_token_value"
-def test_list_logs_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_logs_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging.ListLogsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
- client.list_logs()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_logs(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListLogsRequest()
+ assert args[0] == logging.ListLogsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_logs_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_logs in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc
+ request = {}
+ client.list_logs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_logs(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_logs
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_logs
+ ] = mock_rpc
+
+ request = {}
+ await client.list_logs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_logs(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1870,7 +2754,7 @@ async def test_list_logs_async(
transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest
):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1892,7 +2776,8 @@ async def test_list_logs_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging.ListLogsRequest()
+ request = logging.ListLogsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLogsAsyncPager)
@@ -1937,7 +2822,7 @@ def test_list_logs_field_headers():
@pytest.mark.asyncio
async def test_list_logs_field_headers_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2007,7 +2892,7 @@ def test_list_logs_flattened_error():
@pytest.mark.asyncio
async def test_list_logs_flattened_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2036,7 +2921,7 @@ async def test_list_logs_flattened_async():
@pytest.mark.asyncio
async def test_list_logs_flattened_error_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2050,7 +2935,7 @@ async def test_list_logs_flattened_error_async():
def test_list_logs_pager(transport_name: str = "grpc"):
client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -2085,13 +2970,17 @@ def test_list_logs_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_logs(request={})
+ pager = client.list_logs(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -2100,7 +2989,7 @@ def test_list_logs_pager(transport_name: str = "grpc"):
def test_list_logs_pages(transport_name: str = "grpc"):
client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -2142,7 +3031,7 @@ def test_list_logs_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_logs_async_pager():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2192,7 +3081,7 @@ async def test_list_logs_async_pager():
@pytest.mark.asyncio
async def test_list_logs_async_pages():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2272,12 +3161,91 @@ def test_tail_log_entries(request_type, transport: str = "grpc"):
assert isinstance(message, logging.TailLogEntriesResponse)
+def test_tail_log_entries_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.tail_log_entries in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.tail_log_entries
+ ] = mock_rpc
+ request = [{}]
+ client.tail_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.tail_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_tail_log_entries_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.tail_log_entries
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.tail_log_entries
+ ] = mock_rpc
+
+ request = [{}]
+ await client.tail_log_entries(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.tail_log_entries(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
@pytest.mark.asyncio
async def test_tail_log_entries_async(
transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest
):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2344,7 +3312,7 @@ def test_credentials_transport_error():
)
# It is an error to provide an api_key and a credential.
- options = mock.Mock()
+ options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = LoggingServiceV2Client(
@@ -2401,17 +3369,275 @@ def test_transport_adc(transport_class):
adc.assert_called_once()
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- ],
-)
-def test_transport_kind(transport_name):
- transport = LoggingServiceV2Client.get_transport_class(transport_name)(
+def test_transport_kind_grpc():
+ transport = LoggingServiceV2Client.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_log_empty_call_grpc():
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ call.return_value = None
+ client.delete_log(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.DeleteLogRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_write_log_entries_empty_call_grpc():
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.write_log_entries), "__call__"
+ ) as call:
+ call.return_value = logging.WriteLogEntriesResponse()
+ client.write_log_entries(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.WriteLogEntriesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_log_entries_empty_call_grpc():
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
+ call.return_value = logging.ListLogEntriesResponse()
+ client.list_log_entries(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.ListLogEntriesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_monitored_resource_descriptors_empty_call_grpc():
+ client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_monitored_resource_descriptors), "__call__"
+ ) as call:
+ call.return_value = logging.ListMonitoredResourceDescriptorsResponse()
+ client.list_monitored_resource_descriptors(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.ListMonitoredResourceDescriptorsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_logs_empty_call_grpc():
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+ call.return_value = logging.ListLogsResponse()
+ client.list_logs(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.ListLogsRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_log_empty_call_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.delete_log), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_log(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.DeleteLogRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_write_log_entries_empty_call_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.write_log_entries), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging.WriteLogEntriesResponse()
+ )
+ await client.write_log_entries(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.WriteLogEntriesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_log_entries_empty_call_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging.ListLogEntriesResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_log_entries(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.ListLogEntriesRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_monitored_resource_descriptors), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging.ListMonitoredResourceDescriptorsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_monitored_resource_descriptors(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.ListMonitoredResourceDescriptorsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_logs_empty_call_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- assert transport.kind == transport_name
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_logs), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging.ListLogsResponse(
+ log_names=["log_names_value"],
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_logs(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging.ListLogsRequest()
+
+ assert args[0] == request_msg
def test_transport_grpc_default():
@@ -2974,20 +4200,6 @@ def test_client_with_default_client_info():
prep.assert_called_once_with(client_info)
-@pytest.mark.asyncio
-async def test_transport_close_async():
- client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
- with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
- ) as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
def test_cancel_operation(transport: str = "grpc"):
client = LoggingServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
@@ -3013,9 +4225,9 @@ def test_cancel_operation(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_cancel_operation_async(transport: str = "grpc"):
+async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3068,7 +4280,7 @@ def test_cancel_operation_field_headers():
@pytest.mark.asyncio
async def test_cancel_operation_field_headers_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3113,7 +4325,7 @@ def test_cancel_operation_from_dict():
@pytest.mark.asyncio
async def test_cancel_operation_from_dict_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
@@ -3152,9 +4364,9 @@ def test_get_operation(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_get_operation_async(transport: str = "grpc"):
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3209,7 +4421,7 @@ def test_get_operation_field_headers():
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3256,7 +4468,7 @@ def test_get_operation_from_dict():
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
@@ -3297,9 +4509,9 @@ def test_list_operations(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_list_operations_async(transport: str = "grpc"):
+async def test_list_operations_async(transport: str = "grpc_asyncio"):
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3354,7 +4566,7 @@ def test_list_operations_field_headers():
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3401,7 +4613,7 @@ def test_list_operations_from_dict():
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
client = LoggingServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
@@ -3417,21 +4629,29 @@ async def test_list_operations_from_dict_async():
call.assert_called()
-def test_transport_close():
- transports = {
- "grpc": "_grpc_channel",
- }
+def test_transport_close_grpc():
+ client = LoggingServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
- for transport, close_name in transports.items():
- client = LoggingServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = LoggingServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
@@ -3471,7 +4691,9 @@ def test_api_key_credentials(client_class, transport_class):
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
index 00e443415..7c59a09f1 100644
--- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
+++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2023 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,11 +24,20 @@
import grpc
from grpc.experimental import aio
+import json
import math
import pytest
+from google.api_core import api_core_version
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers
+try:
+ from google.auth.aio import credentials as ga_credentials_async
+
+ HAS_GOOGLE_AUTH_AIO = True
+except ImportError: # pragma: NO COVER
+ HAS_GOOGLE_AUTH_AIO = False
+
from google.api import distribution_pb2 # type: ignore
from google.api import label_pb2 # type: ignore
from google.api import launch_stage_pb2 # type: ignore
@@ -39,6 +48,7 @@
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
+from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.logging_v2.services.metrics_service_v2 import (
@@ -55,10 +65,32 @@
import google.auth
+CRED_INFO_JSON = {
+ "credential_source": "/path/to/file",
+ "credential_type": "service account credentials",
+ "principal": "service-account@example.com",
+}
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)
+
+
+async def mock_async_gen(data, chunk_size=1):
+ for i in range(0, len(data)): # pragma: NO COVER
+ chunk = data[i : i + chunk_size]
+ yield chunk.encode("utf-8")
+
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
+def async_anonymous_credentials():
+ if HAS_GOOGLE_AUTH_AIO:
+ return ga_credentials_async.AnonymousCredentials()
+ return ga_credentials.AnonymousCredentials()
+
+
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
@@ -70,6 +102,17 @@ def modify_default_endpoint(client):
)
+# If default endpoint template is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint template so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint_template(client):
+ return (
+ "test.{UNIVERSE_DOMAIN}"
+ if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE)
+ else client._DEFAULT_ENDPOINT_TEMPLATE
+ )
+
+
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
@@ -100,6 +143,237 @@ def test__get_default_mtls_endpoint():
)
+def test__read_environment_variables():
+ assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None)
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ assert MetricsServiceV2Client._read_environment_variables() == (
+ True,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+ assert MetricsServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError) as excinfo:
+ MetricsServiceV2Client._read_environment_variables()
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ assert MetricsServiceV2Client._read_environment_variables() == (
+ False,
+ "never",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ assert MetricsServiceV2Client._read_environment_variables() == (
+ False,
+ "always",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
+ assert MetricsServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ None,
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ MetricsServiceV2Client._read_environment_variables()
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
+ assert MetricsServiceV2Client._read_environment_variables() == (
+ False,
+ "auto",
+ "foo.com",
+ )
+
+
+def test__get_client_cert_source():
+ mock_provided_cert_source = mock.Mock()
+ mock_default_cert_source = mock.Mock()
+
+ assert MetricsServiceV2Client._get_client_cert_source(None, False) is None
+ assert (
+ MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False)
+ is None
+ )
+ assert (
+ MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True)
+ == mock_provided_cert_source
+ )
+
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source", return_value=True
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=mock_default_cert_source,
+ ):
+ assert (
+ MetricsServiceV2Client._get_client_cert_source(None, True)
+ is mock_default_cert_source
+ )
+ assert (
+ MetricsServiceV2Client._get_client_cert_source(
+ mock_provided_cert_source, "true"
+ )
+ is mock_provided_cert_source
+ )
+
+
+@mock.patch.object(
+ MetricsServiceV2Client,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2Client),
+)
+@mock.patch.object(
+ MetricsServiceV2AsyncClient,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2AsyncClient),
+)
+def test__get_api_endpoint():
+ api_override = "foo.com"
+ mock_client_cert_source = mock.Mock()
+ default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE
+ default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=default_universe
+ )
+ mock_universe = "bar.com"
+ mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=mock_universe
+ )
+
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(
+ api_override, mock_client_cert_source, default_universe, "always"
+ )
+ == api_override
+ )
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, default_universe, "auto"
+ )
+ == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto")
+ == default_endpoint
+ )
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always")
+ == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, default_universe, "always"
+ )
+ == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
+ )
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never")
+ == mock_endpoint
+ )
+ assert (
+ MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never")
+ == default_endpoint
+ )
+
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ MetricsServiceV2Client._get_api_endpoint(
+ None, mock_client_cert_source, mock_universe, "auto"
+ )
+ assert (
+ str(excinfo.value)
+ == "mTLS is not supported in any universe other than googleapis.com."
+ )
+
+
+def test__get_universe_domain():
+ client_universe_domain = "foo.com"
+ universe_domain_env = "bar.com"
+
+ assert (
+ MetricsServiceV2Client._get_universe_domain(
+ client_universe_domain, universe_domain_env
+ )
+ == client_universe_domain
+ )
+ assert (
+ MetricsServiceV2Client._get_universe_domain(None, universe_domain_env)
+ == universe_domain_env
+ )
+ assert (
+ MetricsServiceV2Client._get_universe_domain(None, None)
+ == MetricsServiceV2Client._DEFAULT_UNIVERSE
+ )
+
+ with pytest.raises(ValueError) as excinfo:
+ MetricsServiceV2Client._get_universe_domain("", None)
+ assert str(excinfo.value) == "Universe Domain cannot be an empty string."
+
+
+@pytest.mark.parametrize(
+ "error_code,cred_info_json,show_cred_info",
+ [
+ (401, CRED_INFO_JSON, True),
+ (403, CRED_INFO_JSON, True),
+ (404, CRED_INFO_JSON, True),
+ (500, CRED_INFO_JSON, False),
+ (401, None, False),
+ (403, None, False),
+ (404, None, False),
+ (500, None, False),
+ ],
+)
+def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
+ cred = mock.Mock(["get_cred_info"])
+ cred.get_cred_info = mock.Mock(return_value=cred_info_json)
+ client = MetricsServiceV2Client(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=["foo"])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ if show_cred_info:
+ assert error.details == ["foo", CRED_INFO_STRING]
+ else:
+ assert error.details == ["foo"]
+
+
+@pytest.mark.parametrize("error_code", [401, 403, 404, 500])
+def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
+ cred = mock.Mock([])
+ assert not hasattr(cred, "get_cred_info")
+ client = MetricsServiceV2Client(credentials=cred)
+ client._transport._credentials = cred
+
+ error = core_exceptions.GoogleAPICallError("message", details=[])
+ error.code = error_code
+
+ client._add_cred_info_for_auth_errors(error)
+ assert error.details == []
+
+
@pytest.mark.parametrize(
"client_class,transport_name",
[
@@ -217,13 +491,13 @@ def test_metrics_service_v2_client_get_transport_class():
)
@mock.patch.object(
MetricsServiceV2Client,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(MetricsServiceV2Client),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2Client),
)
@mock.patch.object(
MetricsServiceV2AsyncClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(MetricsServiceV2AsyncClient),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2AsyncClient),
)
def test_metrics_service_v2_client_client_options(
client_class, transport_class, transport_name
@@ -265,7 +539,9 @@ def test_metrics_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -295,15 +571,23 @@ def test_metrics_service_v2_client_client_options(
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
client = client_class(transport=transport_name)
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
- with pytest.raises(ValueError):
+ with pytest.raises(ValueError) as excinfo:
client = client_class(transport=transport_name)
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
@@ -313,7 +597,9 @@ def test_metrics_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
@@ -331,7 +617,9 @@ def test_metrics_service_v2_client_client_options(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -372,13 +660,13 @@ def test_metrics_service_v2_client_client_options(
)
@mock.patch.object(
MetricsServiceV2Client,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(MetricsServiceV2Client),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2Client),
)
@mock.patch.object(
MetricsServiceV2AsyncClient,
- "DEFAULT_ENDPOINT",
- modify_default_endpoint(MetricsServiceV2AsyncClient),
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2AsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_metrics_service_v2_client_mtls_env_auto(
@@ -401,7 +689,9 @@ def test_metrics_service_v2_client_mtls_env_auto(
if use_client_cert_env == "false":
expected_client_cert_source = None
- expected_host = client.DEFAULT_ENDPOINT
+ expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ )
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -433,7 +723,9 @@ def test_metrics_service_v2_client_mtls_env_auto(
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
- expected_host = client.DEFAULT_ENDPOINT
+ expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ )
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
@@ -467,7 +759,9 @@ def test_metrics_service_v2_client_mtls_env_auto(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -557,6 +851,115 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas
assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
assert cert_source == mock_client_cert_source
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError) as excinfo:
+ client_class.get_mtls_endpoint_and_cert_source()
+
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+ )
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError) as excinfo:
+ client_class.get_mtls_endpoint_and_cert_source()
+
+ assert (
+ str(excinfo.value)
+ == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient]
+)
+@mock.patch.object(
+ MetricsServiceV2Client,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2Client),
+)
+@mock.patch.object(
+ MetricsServiceV2AsyncClient,
+ "_DEFAULT_ENDPOINT_TEMPLATE",
+ modify_default_endpoint_template(MetricsServiceV2AsyncClient),
+)
+def test_metrics_service_v2_client_client_api_endpoint(client_class):
+ mock_client_cert_source = client_cert_source_callback
+ api_override = "foo.com"
+ default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE
+ default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=default_universe
+ )
+ mock_universe = "bar.com"
+ mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=mock_universe
+ )
+
+ # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true",
+ # use ClientOptions.api_endpoint as the api endpoint regardless.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+ with mock.patch(
+ "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=mock_client_cert_source, api_endpoint=api_override
+ )
+ client = client_class(
+ client_options=options,
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ assert client.api_endpoint == api_override
+
+ # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == default_endpoint
+
+ # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always",
+ # use the DEFAULT_MTLS_ENDPOINT as the api endpoint.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ client = client_class(credentials=ga_credentials.AnonymousCredentials())
+ assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+
+ # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default),
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist,
+ # and ClientOptions.universe_domain="bar.com",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint.
+ options = client_options.ClientOptions()
+ universe_exists = hasattr(options, "universe_domain")
+ if universe_exists:
+ options = client_options.ClientOptions(universe_domain=mock_universe)
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ else:
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert client.api_endpoint == (
+ mock_endpoint if universe_exists else default_endpoint
+ )
+ assert client.universe_domain == (
+ mock_universe if universe_exists else default_universe
+ )
+
+ # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+ # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+ options = client_options.ClientOptions()
+ if hasattr(options, "universe_domain"):
+ delattr(options, "universe_domain")
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ client = client_class(
+ client_options=options, credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert client.api_endpoint == default_endpoint
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -582,7 +985,9 @@ def test_metrics_service_v2_client_client_options_scopes(
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -621,7 +1026,9 @@ def test_metrics_service_v2_client_client_options_credentials_file(
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -681,7 +1088,9 @@ def test_metrics_service_v2_client_create_channel_credentials_file(
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
@@ -753,27 +1162,121 @@ def test_list_log_metrics(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.ListLogMetricsRequest()
+ request = logging_metrics.ListLogMetricsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLogMetricsPager)
assert response.next_page_token == "next_page_token_value"
-def test_list_log_metrics_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_list_log_metrics_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = MetricsServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_metrics.ListLogMetricsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
- client.list_log_metrics()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.list_log_metrics(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.ListLogMetricsRequest()
+ assert args[0] == logging_metrics.ListLogMetricsRequest(
+ parent="parent_value",
+ page_token="page_token_value",
+ )
+
+
+def test_list_log_metrics_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.list_log_metrics in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.list_log_metrics
+ ] = mock_rpc
+ request = {}
+ client.list_log_metrics(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.list_log_metrics(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_list_log_metrics_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.list_log_metrics
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.list_log_metrics
+ ] = mock_rpc
+
+ request = {}
+ await client.list_log_metrics(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.list_log_metrics(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -781,7 +1284,7 @@ async def test_list_log_metrics_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest
):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -802,7 +1305,8 @@ async def test_list_log_metrics_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.ListLogMetricsRequest()
+ request = logging_metrics.ListLogMetricsRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListLogMetricsAsyncPager)
@@ -846,7 +1350,7 @@ def test_list_log_metrics_field_headers():
@pytest.mark.asyncio
async def test_list_log_metrics_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -916,7 +1420,7 @@ def test_list_log_metrics_flattened_error():
@pytest.mark.asyncio
async def test_list_log_metrics_flattened_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -945,7 +1449,7 @@ async def test_list_log_metrics_flattened_async():
@pytest.mark.asyncio
async def test_list_log_metrics_flattened_error_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -959,7 +1463,7 @@ async def test_list_log_metrics_flattened_error_async():
def test_list_log_metrics_pager(transport_name: str = "grpc"):
client = MetricsServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -994,13 +1498,17 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"):
RuntimeError,
)
- metadata = ()
- metadata = tuple(metadata) + (
+ expected_metadata = ()
+ retry = retries.Retry()
+ timeout = 5
+ expected_metadata = tuple(expected_metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
- pager = client.list_log_metrics(request={})
+ pager = client.list_log_metrics(request={}, retry=retry, timeout=timeout)
- assert pager._metadata == metadata
+ assert pager._metadata == expected_metadata
+ assert pager._retry == retry
+ assert pager._timeout == timeout
results = list(pager)
assert len(results) == 6
@@ -1009,7 +1517,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"):
def test_list_log_metrics_pages(transport_name: str = "grpc"):
client = MetricsServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=ga_credentials.AnonymousCredentials(),
transport=transport_name,
)
@@ -1051,7 +1559,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"):
@pytest.mark.asyncio
async def test_list_log_metrics_async_pager():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1101,7 +1609,7 @@ async def test_list_log_metrics_async_pager():
@pytest.mark.asyncio
async def test_list_log_metrics_async_pages():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials,
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1181,7 +1689,8 @@ def test_get_log_metric(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.GetLogMetricRequest()
+ request = logging_metrics.GetLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_metrics.LogMetric)
@@ -1194,20 +1703,109 @@ def test_get_log_metric(request_type, transport: str = "grpc"):
assert response.version == logging_metrics.LogMetric.ApiVersion.V1
-def test_get_log_metric_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_get_log_metric_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = MetricsServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_metrics.GetLogMetricRequest(
+ metric_name="metric_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
- client.get_log_metric()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.get_log_metric(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.GetLogMetricRequest()
+ assert args[0] == logging_metrics.GetLogMetricRequest(
+ metric_name="metric_name_value",
+ )
+
+
+def test_get_log_metric_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.get_log_metric in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc
+ request = {}
+ client.get_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.get_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_get_log_metric_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.get_log_metric
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.get_log_metric
+ ] = mock_rpc
+
+ request = {}
+ await client.get_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.get_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1215,7 +1813,7 @@ async def test_get_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest
):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1242,7 +1840,8 @@ async def test_get_log_metric_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.GetLogMetricRequest()
+ request = logging_metrics.GetLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_metrics.LogMetric)
@@ -1292,7 +1891,7 @@ def test_get_log_metric_field_headers():
@pytest.mark.asyncio
async def test_get_log_metric_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1362,7 +1961,7 @@ def test_get_log_metric_flattened_error():
@pytest.mark.asyncio
async def test_get_log_metric_flattened_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1391,7 +1990,7 @@ async def test_get_log_metric_flattened_async():
@pytest.mark.asyncio
async def test_get_log_metric_flattened_error_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1439,7 +2038,8 @@ def test_create_log_metric(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.CreateLogMetricRequest()
+ request = logging_metrics.CreateLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_metrics.LogMetric)
@@ -1452,22 +2052,113 @@ def test_create_log_metric(request_type, transport: str = "grpc"):
assert response.version == logging_metrics.LogMetric.ApiVersion.V1
-def test_create_log_metric_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_create_log_metric_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = MetricsServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+ # since we want to check that UUID4 are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_metrics.CreateLogMetricRequest(
+ parent="parent_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.create_log_metric), "__call__"
) as call:
- client.create_log_metric()
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.create_log_metric(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.CreateLogMetricRequest()
+ assert args[0] == logging_metrics.CreateLogMetricRequest(
+ parent="parent_value",
+ )
+
+
+def test_create_log_metric_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_log_metric in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.create_log_metric
+ ] = mock_rpc
+ request = {}
+ client.create_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.create_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_create_log_metric_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.create_log_metric
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.create_log_metric
+ ] = mock_rpc
+
+ request = {}
+ await client.create_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.create_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1475,7 +2166,7 @@ async def test_create_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest
):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1504,7 +2195,8 @@ async def test_create_log_metric_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.CreateLogMetricRequest()
+ request = logging_metrics.CreateLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_metrics.LogMetric)
@@ -1556,7 +2248,7 @@ def test_create_log_metric_field_headers():
@pytest.mark.asyncio
async def test_create_log_metric_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1635,7 +2327,7 @@ def test_create_log_metric_flattened_error():
@pytest.mark.asyncio
async def test_create_log_metric_flattened_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1670,7 +2362,7 @@ async def test_create_log_metric_flattened_async():
@pytest.mark.asyncio
async def test_create_log_metric_flattened_error_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1719,7 +2411,8 @@ def test_update_log_metric(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.UpdateLogMetricRequest()
+ request = logging_metrics.UpdateLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_metrics.LogMetric)
@@ -1732,22 +2425,113 @@ def test_update_log_metric(request_type, transport: str = "grpc"):
assert response.version == logging_metrics.LogMetric.ApiVersion.V1
-def test_update_log_metric_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
+def test_update_log_metric_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
client = MetricsServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
transport="grpc",
)
+ # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_metrics.UpdateLogMetricRequest(
+ metric_name="metric_name_value",
+ )
+
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.update_log_metric), "__call__"
) as call:
- client.update_log_metric()
+ call.return_value.name = (
+        "foo"  # operation_request.operation in compute client(s) expects a string.
+ )
+ client.update_log_metric(request=request)
call.assert_called()
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.UpdateLogMetricRequest()
+ assert args[0] == logging_metrics.UpdateLogMetricRequest(
+ metric_name="metric_name_value",
+ )
+
+
+def test_update_log_metric_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.update_log_metric in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.update_log_metric
+ ] = mock_rpc
+ request = {}
+ client.update_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.update_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_update_log_metric_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.update_log_metric
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.update_log_metric
+ ] = mock_rpc
+
+ request = {}
+ await client.update_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.update_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -1755,7 +2539,7 @@ async def test_update_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest
):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -1784,7 +2568,8 @@ async def test_update_log_metric_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.UpdateLogMetricRequest()
+ request = logging_metrics.UpdateLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert isinstance(response, logging_metrics.LogMetric)
@@ -1836,7 +2621,7 @@ def test_update_log_metric_field_headers():
@pytest.mark.asyncio
async def test_update_log_metric_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -1915,7 +2700,7 @@ def test_update_log_metric_flattened_error():
@pytest.mark.asyncio
async def test_update_log_metric_flattened_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -1950,7 +2735,7 @@ async def test_update_log_metric_flattened_async():
@pytest.mark.asyncio
async def test_update_log_metric_flattened_error_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -1991,28 +2776,120 @@ def test_delete_log_metric(request_type, transport: str = "grpc"):
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.DeleteLogMetricRequest()
+ request = logging_metrics.DeleteLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
-def test_delete_log_metric_empty_call():
- # This test is a coverage failsafe to make sure that totally empty calls,
- # i.e. request == None and no flattened fields passed, work.
- client = MetricsServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc",
- )
+def test_delete_log_metric_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP 4235.
+ request = logging_metrics.DeleteLogMetricRequest(
+ metric_name="metric_name_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_log_metric), "__call__"
+ ) as call:
+ call.return_value.name = (
+        "foo"  # operation_request.operation in compute client(s) expects a string.
+ )
+ client.delete_log_metric(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == logging_metrics.DeleteLogMetricRequest(
+ metric_name="metric_name_value",
+ )
+
+
+def test_delete_log_metric_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_log_metric in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.delete_log_metric
+ ] = mock_rpc
+ request = {}
+ client.delete_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_log_metric(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_log_metric_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_log_metric
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_log_metric
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_log_metric(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ await client.delete_log_metric(request)
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.delete_log_metric), "__call__"
- ) as call:
- client.delete_log_metric()
- call.assert_called()
- _, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.DeleteLogMetricRequest()
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
@pytest.mark.asyncio
@@ -2020,7 +2897,7 @@ async def test_delete_log_metric_async(
transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest
):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2039,7 +2916,8 @@ async def test_delete_log_metric_async(
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == logging_metrics.DeleteLogMetricRequest()
+ request = logging_metrics.DeleteLogMetricRequest()
+ assert args[0] == request
# Establish that the response is the type that we expect.
assert response is None
@@ -2084,7 +2962,7 @@ def test_delete_log_metric_field_headers():
@pytest.mark.asyncio
async def test_delete_log_metric_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2156,7 +3034,7 @@ def test_delete_log_metric_flattened_error():
@pytest.mark.asyncio
async def test_delete_log_metric_flattened_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
@@ -2185,7 +3063,7 @@ async def test_delete_log_metric_flattened_async():
@pytest.mark.asyncio
async def test_delete_log_metric_flattened_error_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Attempting to call a method with both a request object and flattened
@@ -2231,7 +3109,7 @@ def test_credentials_transport_error():
)
# It is an error to provide an api_key and a credential.
- options = mock.Mock()
+ options = client_options.ClientOptions()
options.api_key = "api_key"
with pytest.raises(ValueError):
client = MetricsServiceV2Client(
@@ -2288,17 +3166,298 @@ def test_transport_adc(transport_class):
adc.assert_called_once()
-@pytest.mark.parametrize(
- "transport_name",
- [
- "grpc",
- ],
-)
-def test_transport_kind(transport_name):
- transport = MetricsServiceV2Client.get_transport_class(transport_name)(
+def test_transport_kind_grpc():
+ transport = MetricsServiceV2Client.get_transport_class("grpc")(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+ assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_list_log_metrics_empty_call_grpc():
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
+ call.return_value = logging_metrics.ListLogMetricsResponse()
+ client.list_log_metrics(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.ListLogMetricsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_log_metric_empty_call_grpc():
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
+ call.return_value = logging_metrics.LogMetric()
+ client.get_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.GetLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_log_metric_empty_call_grpc():
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_log_metric), "__call__"
+ ) as call:
+ call.return_value = logging_metrics.LogMetric()
+ client.create_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.CreateLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_update_log_metric_empty_call_grpc():
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_log_metric), "__call__"
+ ) as call:
+ call.return_value = logging_metrics.LogMetric()
+ client.update_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.UpdateLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_delete_log_metric_empty_call_grpc():
+ client = MetricsServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_log_metric), "__call__"
+ ) as call:
+ call.return_value = None
+ client.delete_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.DeleteLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+def test_transport_kind_grpc_asyncio():
+ transport = MetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")(
+ credentials=async_anonymous_credentials()
+ )
+ assert transport.kind == "grpc_asyncio"
+
+
+def test_initialize_client_w_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_list_log_metrics_empty_call_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_metrics.ListLogMetricsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+ await client.list_log_metrics(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.ListLogMetricsRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_get_log_metric_empty_call_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
)
- assert transport.kind == transport_name
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_metrics.LogMetric(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ bucket_name="bucket_name_value",
+ disabled=True,
+ value_extractor="value_extractor_value",
+ version=logging_metrics.LogMetric.ApiVersion.V1,
+ )
+ )
+ await client.get_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.GetLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_create_log_metric_empty_call_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_log_metric), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_metrics.LogMetric(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ bucket_name="bucket_name_value",
+ disabled=True,
+ value_extractor="value_extractor_value",
+ version=logging_metrics.LogMetric.ApiVersion.V1,
+ )
+ )
+ await client.create_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.CreateLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_update_log_metric_empty_call_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_log_metric), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ logging_metrics.LogMetric(
+ name="name_value",
+ description="description_value",
+ filter="filter_value",
+ bucket_name="bucket_name_value",
+ disabled=True,
+ value_extractor="value_extractor_value",
+ version=logging_metrics.LogMetric.ApiVersion.V1,
+ )
+ )
+ await client.update_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.UpdateLogMetricRequest()
+
+ assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio
+async def test_delete_log_metric_empty_call_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport="grpc_asyncio",
+ )
+
+ # Mock the actual call, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_log_metric), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_log_metric(request=None)
+
+ # Establish that the underlying stub method was called.
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ request_msg = logging_metrics.DeleteLogMetricRequest()
+
+ assert args[0] == request_msg
def test_transport_grpc_default():
@@ -2860,20 +4019,6 @@ def test_client_with_default_client_info():
prep.assert_called_once_with(client_info)
-@pytest.mark.asyncio
-async def test_transport_close_async():
- client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- transport="grpc_asyncio",
- )
- with mock.patch.object(
- type(getattr(client.transport, "grpc_channel")), "close"
- ) as close:
- async with client:
- close.assert_not_called()
- close.assert_called_once()
-
-
def test_cancel_operation(transport: str = "grpc"):
client = MetricsServiceV2Client(
credentials=ga_credentials.AnonymousCredentials(),
@@ -2899,9 +4044,9 @@ def test_cancel_operation(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_cancel_operation_async(transport: str = "grpc"):
+async def test_cancel_operation_async(transport: str = "grpc_asyncio"):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -2954,7 +4099,7 @@ def test_cancel_operation_field_headers():
@pytest.mark.asyncio
async def test_cancel_operation_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -2999,7 +4144,7 @@ def test_cancel_operation_from_dict():
@pytest.mark.asyncio
async def test_cancel_operation_from_dict_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
@@ -3038,9 +4183,9 @@ def test_get_operation(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_get_operation_async(transport: str = "grpc"):
+async def test_get_operation_async(transport: str = "grpc_asyncio"):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3095,7 +4240,7 @@ def test_get_operation_field_headers():
@pytest.mark.asyncio
async def test_get_operation_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3142,7 +4287,7 @@ def test_get_operation_from_dict():
@pytest.mark.asyncio
async def test_get_operation_from_dict_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
@@ -3183,9 +4328,9 @@ def test_list_operations(transport: str = "grpc"):
@pytest.mark.asyncio
-async def test_list_operations_async(transport: str = "grpc"):
+async def test_list_operations_async(transport: str = "grpc_asyncio"):
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
transport=transport,
)
@@ -3240,7 +4385,7 @@ def test_list_operations_field_headers():
@pytest.mark.asyncio
async def test_list_operations_field_headers_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
@@ -3287,7 +4432,7 @@ def test_list_operations_from_dict():
@pytest.mark.asyncio
async def test_list_operations_from_dict_async():
client = MetricsServiceV2AsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=async_anonymous_credentials(),
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_operations), "__call__") as call:
@@ -3303,21 +4448,29 @@ async def test_list_operations_from_dict_async():
call.assert_called()
-def test_transport_close():
- transports = {
- "grpc": "_grpc_channel",
- }
+def test_transport_close_grpc():
+ client = MetricsServiceV2Client(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
- for transport, close_name in transports.items():
- client = MetricsServiceV2Client(
- credentials=ga_credentials.AnonymousCredentials(), transport=transport
- )
- with mock.patch.object(
- type(getattr(client.transport, close_name)), "close"
- ) as close:
- with client:
- close.assert_not_called()
- close.assert_called_once()
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+ client = MetricsServiceV2AsyncClient(
+ credentials=async_anonymous_credentials(), transport="grpc_asyncio"
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "_grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
def test_client_ctx():
@@ -3357,7 +4510,9 @@ def test_api_key_credentials(client_class, transport_class):
patched.assert_called_once_with(
credentials=mock_cred,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+ UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+ ),
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
diff --git a/tests/unit/handlers/__init__.py b/tests/unit/handlers/__init__.py
index df379f1e9..32eba185f 100644
--- a/tests/unit/handlers/__init__.py
+++ b/tests/unit/handlers/__init__.py
@@ -11,3 +11,44 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
+
+# Utility functions to setup mock OpenTelemetry spans, needed by multiple test
+# suites.
+
+import contextlib
+
+import opentelemetry.context
+import opentelemetry.trace
+
+from opentelemetry.trace import NonRecordingSpan
+from opentelemetry.trace.span import TraceFlags
+
+_OTEL_SPAN_CONTEXT_TRACE_ID = 0x123456789123456789
+_OTEL_SPAN_CONTEXT_SPAN_ID = 0x123456789
+_OTEL_SPAN_CONTEXT_TRACEFLAGS = TraceFlags(TraceFlags.SAMPLED)
+
+_EXPECTED_OTEL_TRACE_ID = "00000000000000123456789123456789"
+_EXPECTED_OTEL_SPAN_ID = "0000000123456789"
+_EXPECTED_OTEL_TRACESAMPLED = True
+
+
+@contextlib.contextmanager
+def _setup_otel_span_context():
+    """Set up a non-recording OpenTelemetry span whose mock span context is returned
+    by opentelemetry.trace.get_current_span; yielded as a context manager.
+ """
+ span_context = opentelemetry.trace.SpanContext(
+ _OTEL_SPAN_CONTEXT_TRACE_ID,
+ _OTEL_SPAN_CONTEXT_SPAN_ID,
+ False,
+ trace_flags=_OTEL_SPAN_CONTEXT_TRACEFLAGS,
+ )
+ ctx = opentelemetry.trace.set_span_in_context(NonRecordingSpan(span_context))
+ tracer = opentelemetry.trace.NoOpTracer()
+ token = opentelemetry.context.attach(ctx)
+ try:
+ with tracer.start_as_current_span("test-span", context=ctx):
+ yield
+ finally:
+ opentelemetry.context.detach(token)
diff --git a/tests/unit/handlers/test__helpers.py b/tests/unit/handlers/test__helpers.py
index 6a7ff245f..d0577cf22 100644
--- a/tests/unit/handlers/test__helpers.py
+++ b/tests/unit/handlers/test__helpers.py
@@ -16,11 +16,22 @@
import mock
+from tests.unit.handlers import (
+ _setup_otel_span_context,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+)
+
_FLASK_TRACE_ID = "flask0id"
_FLASK_SPAN_ID = "span0flask"
+_FLASK_SPAN_ID_XCTC_DEC = "12345"
+_FLASK_SPAN_ID_XCTC_HEX = "3039".zfill(16)
_FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"}
_DJANGO_TRACE_ID = "django0id"
_DJANGO_SPAN_ID = "span0django"
+_DJANGO_SPAN_ID_XCTC_DEC = "54321"
+_DJANGO_SPAN_ID_XCTC_HEX = "d431".zfill(16)
_DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"}
@@ -57,8 +68,9 @@ def test_no_context_header(self):
def test_xcloud_header(self):
flask_trace_header = "X_CLOUD_TRACE_CONTEXT"
expected_trace_id = _FLASK_TRACE_ID
- expected_span_id = _FLASK_SPAN_ID
- flask_trace_id = f"{expected_trace_id}/{expected_span_id};o=1"
+ input_span_id = _FLASK_SPAN_ID_XCTC_DEC
+ expected_span_id = _FLASK_SPAN_ID_XCTC_HEX
+ flask_trace_id = f"{expected_trace_id}/{input_span_id};o=1"
app = self.create_app()
context = app.test_request_context(
@@ -166,9 +178,10 @@ def test_xcloud_header(self):
from google.cloud.logging_v2.handlers.middleware import request
django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT"
- expected_span_id = _DJANGO_SPAN_ID
+ input_span_id = _DJANGO_SPAN_ID_XCTC_DEC
+ expected_span_id = _DJANGO_SPAN_ID_XCTC_HEX
expected_trace_id = _DJANGO_TRACE_ID
- django_trace_id = f"{expected_trace_id}/{expected_span_id};o=1"
+ django_trace_id = f"{expected_trace_id}/{input_span_id};o=1"
django_request = RequestFactory().get(
"/", **{django_trace_header: django_trace_id}
@@ -242,6 +255,19 @@ def test_http_request_sparse(self):
self.assertEqual(http_request["requestUrl"], expected_path)
self.assertEqual(http_request["protocol"], "HTTP/1.1")
+ def test_invalid_host_header(self):
+ from django.test import RequestFactory
+ from google.cloud.logging_v2.handlers.middleware import request
+
+ invalid_http_host = "testserver%7d"
+ django_request = RequestFactory().put("/", HTTP_HOST=invalid_http_host)
+ middleware = request.RequestMiddleware(None)
+ middleware(django_request)
+ http_request, *_ = self._call_fut()
+ self.assertEqual(http_request["requestMethod"], "PUT")
+ self.assertIsNone(http_request["requestUrl"])
+ self.assertEqual(http_request["protocol"], "HTTP/1.1")
+
class Test_get_request_data(unittest.TestCase):
@staticmethod
@@ -343,6 +369,120 @@ def test_wo_libraries(self):
output = self._call_fut()
self.assertEqual(output, (None, None, None, False))
+ def test_otel_span_exists_no_request(self):
+ flask_expected = (None, None, None, False)
+ django_expected = (None, None, None, False)
+
+ with _setup_otel_span_context():
+ _, _, output = self._helper(django_expected, flask_expected)
+ self.assertEqual(
+ output,
+ (
+ None,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+ ),
+ )
+
+ def test_otel_span_exists_django_request(self):
+ django_expected = (
+ _DJANGO_HTTP_REQUEST,
+ _DJANGO_TRACE_ID,
+ _DJANGO_SPAN_ID,
+ False,
+ )
+ flask_expected = (None, None, None, False)
+
+ with _setup_otel_span_context():
+ _, _, output = self._helper(django_expected, flask_expected)
+ self.assertEqual(
+ output,
+ (
+ _DJANGO_HTTP_REQUEST,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+ ),
+ )
+
+ def test_otel_span_exists_flask_request(self):
+ django_expected = (None, None, None, False)
+ flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False)
+
+ with _setup_otel_span_context():
+ _, _, output = self._helper(django_expected, flask_expected)
+ self.assertEqual(
+ output,
+ (
+ _FLASK_HTTP_REQUEST,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+ ),
+ )
+
+ def test_otel_span_exists_both_django_and_flask(self):
+ django_expected = (
+ _DJANGO_HTTP_REQUEST,
+ _DJANGO_TRACE_ID,
+ _DJANGO_SPAN_ID,
+ False,
+ )
+ flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False)
+
+ with _setup_otel_span_context():
+ _, _, output = self._helper(django_expected, flask_expected)
+
+ # Django wins
+ self.assertEqual(
+ output,
+ (
+ _DJANGO_HTTP_REQUEST,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+ ),
+ )
+
+ def test_no_otel_span_no_requests(self):
+ flask_expected = (None, None, None, False)
+ django_expected = (None, None, None, False)
+ _, _, output = self._helper(django_expected, flask_expected)
+ self.assertEqual(output, (None, None, None, False))
+
+ def test_no_otel_span_django_request(self):
+ django_expected = (
+ _DJANGO_HTTP_REQUEST,
+ _DJANGO_TRACE_ID,
+ _DJANGO_SPAN_ID,
+ False,
+ )
+ flask_expected = (None, None, None, False)
+ _, _, output = self._helper(django_expected, flask_expected)
+ self.assertEqual(output, django_expected)
+
+ def test_no_otel_span_flask_request(self):
+ django_expected = (None, None, None, False)
+ flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False)
+ _, _, output = self._helper(django_expected, flask_expected)
+
+ # Django wins
+ self.assertEqual(output, flask_expected)
+
+ def test_no_otel_span_both_django_and_flask(self):
+ django_expected = (
+ _DJANGO_HTTP_REQUEST,
+ _DJANGO_TRACE_ID,
+ _DJANGO_SPAN_ID,
+ False,
+ )
+ flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False)
+ _, _, output = self._helper(django_expected, flask_expected)
+
+ # Django wins
+ self.assertEqual(output, django_expected)
+
class Test__parse_xcloud_trace(unittest.TestCase):
@staticmethod
@@ -367,25 +507,40 @@ def test_no_span(self):
self.assertEqual(sampled, False)
def test_no_trace(self):
- header = "/12345"
+ input_span = "12345"
+ expected_span = "3039".zfill(16)
+ header = f"/{input_span}"
trace_id, span_id, sampled = self._call_fut(header)
self.assertIsNone(trace_id)
- self.assertEqual(span_id, "12345")
+ self.assertEqual(span_id, expected_span)
self.assertEqual(sampled, False)
def test_with_span(self):
expected_trace = "12345"
- expected_span = "67890"
- header = f"{expected_trace}/{expected_span}"
+ input_span = "67890"
+ expected_span = "10932".zfill(16)
+ header = f"{expected_trace}/{input_span}"
trace_id, span_id, sampled = self._call_fut(header)
self.assertEqual(trace_id, expected_trace)
self.assertEqual(span_id, expected_span)
self.assertEqual(sampled, False)
+ def test_with_span_decimal_not_in_bounds(self):
+ input_spans = ["0", "9" * 100]
+
+ for input_span in input_spans:
+ expected_trace = "12345"
+ header = f"{expected_trace}/{input_span}"
+ trace_id, span_id, sampled = self._call_fut(header)
+ self.assertEqual(trace_id, expected_trace)
+ self.assertIsNone(span_id)
+ self.assertEqual(sampled, False)
+
def test_with_extra_characters(self):
expected_trace = "12345"
- expected_span = "67890"
- header = f"{expected_trace}/{expected_span};abc"
+ input_span = "67890"
+ expected_span = "10932".zfill(16)
+ header = f"{expected_trace}/{input_span};abc"
trace_id, span_id, sampled = self._call_fut(header)
self.assertEqual(trace_id, expected_trace)
self.assertEqual(span_id, expected_span)
@@ -393,8 +548,9 @@ def test_with_extra_characters(self):
def test_with_explicit_no_sampled(self):
expected_trace = "12345"
- expected_span = "67890"
- header = f"{expected_trace}/{expected_span};o=0"
+ input_span = "67890"
+ expected_span = "10932".zfill(16)
+ header = f"{expected_trace}/{input_span};o=0"
trace_id, span_id, sampled = self._call_fut(header)
self.assertEqual(trace_id, expected_trace)
self.assertEqual(span_id, expected_span)
@@ -402,8 +558,9 @@ def test_with_explicit_no_sampled(self):
def test_with__sampled(self):
expected_trace = "12345"
- expected_span = "67890"
- header = f"{expected_trace}/{expected_span};o=1"
+ input_span = "67890"
+ expected_span = "10932".zfill(16)
+ header = f"{expected_trace}/{input_span};o=1"
trace_id, span_id, sampled = self._call_fut(header)
self.assertEqual(trace_id, expected_trace)
self.assertEqual(span_id, expected_span)
@@ -464,3 +621,25 @@ def test_invalid_headers(self):
self.assertIsNone(trace_id)
self.assertIsNone(span_id)
self.assertEqual(sampled, False)
+
+
+class Test__parse_open_telemetry_data(unittest.TestCase):
+ @staticmethod
+ def _call_fut():
+ from google.cloud.logging_v2.handlers import _helpers
+
+ trace, span, sampled = _helpers._retrieve_current_open_telemetry_span()
+ return trace, span, sampled
+
+ def test_no_op(self):
+ trace_id, span_id, sampled = self._call_fut()
+ self.assertIsNone(trace_id)
+ self.assertIsNone(span_id)
+ self.assertEqual(sampled, False)
+
+ def test_span_exists(self):
+ with _setup_otel_span_context():
+ trace_id, span_id, sampled = self._call_fut()
+ self.assertEqual(trace_id, _EXPECTED_OTEL_TRACE_ID)
+ self.assertEqual(span_id, _EXPECTED_OTEL_SPAN_ID)
+ self.assertEqual(sampled, _EXPECTED_OTEL_TRACESAMPLED)
diff --git a/tests/unit/handlers/test__monitored_resources.py b/tests/unit/handlers/test__monitored_resources.py
index 3c62cba88..28f064b7b 100644
--- a/tests/unit/handlers/test__monitored_resources.py
+++ b/tests/unit/handlers/test__monitored_resources.py
@@ -12,37 +12,30 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
import unittest
+import logging
import mock
import os
import functools
-from google.cloud.logging_v2.handlers._monitored_resources import (
- _create_functions_resource,
-)
from google.cloud.logging_v2.handlers._monitored_resources import (
_create_app_engine_resource,
-)
-from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_functions_resource,
_create_kubernetes_resource,
-)
-from google.cloud.logging_v2.handlers._monitored_resources import (
- _create_cloud_run_resource,
-)
-from google.cloud.logging_v2.handlers._monitored_resources import (
+ _create_cloud_run_service_resource,
+ _create_cloud_run_job_resource,
_create_compute_resource,
-)
-from google.cloud.logging_v2.handlers._monitored_resources import (
_create_global_resource,
+ detect_resource,
+ add_resource_labels,
)
-from google.cloud.logging_v2.handlers._monitored_resources import detect_resource
from google.cloud.logging_v2.handlers import _monitored_resources
from google.cloud.logging_v2.resource import Resource
class Test_Create_Resources(unittest.TestCase):
-
PROJECT = "test-project"
LOCATION = "test-location"
NAME = "test-name"
@@ -54,6 +47,7 @@ def _mock_metadata(self, endpoint):
if (
endpoint == _monitored_resources._ZONE_ID
or endpoint == _monitored_resources._REGION_ID
+ or endpoint == _monitored_resources._GKE_CLUSTER_LOCATION
):
return self.LOCATION
elif (
@@ -132,7 +126,6 @@ def test_functions_resource_no_name(self):
self.assertEqual(func_resource.labels["function_name"], "")
def test_create_kubernetes_resource(self):
-
patch = mock.patch(
"google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
wraps=self._mock_metadata,
@@ -160,7 +153,7 @@ def test_compute_resource(self):
self.assertEqual(resource.labels["instance_id"], self.NAME)
self.assertEqual(resource.labels["zone"], self.LOCATION)
- def test_cloud_run_resource(self):
+ def test_cloud_run_service_resource(self):
patch = mock.patch(
"google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
wraps=self._mock_metadata,
@@ -169,7 +162,7 @@ def test_cloud_run_resource(self):
os.environ[_monitored_resources._CLOUD_RUN_REVISION_ID] = self.VERSION
os.environ[_monitored_resources._CLOUD_RUN_CONFIGURATION_ID] = self.CONFIG
with patch:
- resource = _create_cloud_run_resource()
+ resource = _create_cloud_run_service_resource()
self.assertIsInstance(resource, Resource)
self.assertEqual(resource.type, "cloud_run_revision")
self.assertEqual(resource.labels["project_id"], self.PROJECT)
@@ -178,6 +171,23 @@ def test_cloud_run_resource(self):
self.assertEqual(resource.labels["configuration_name"], self.CONFIG)
self.assertEqual(resource.labels["location"], self.LOCATION)
+ def test_cloud_run_job_resource(self):
+ patch = mock.patch(
+ "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
+ wraps=self._mock_metadata,
+ )
+ os.environ[_monitored_resources._CLOUD_RUN_JOB_ID] = self.NAME
+ os.environ[_monitored_resources._CLOUD_RUN_EXECUTION_ID] = self.VERSION
+ os.environ[_monitored_resources._CLOUD_RUN_TASK_INDEX] = self.CONFIG
+ os.environ[_monitored_resources._CLOUD_RUN_TASK_ATTEMPT] = self.CLUSTER
+ with patch:
+ resource = _create_cloud_run_job_resource()
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "cloud_run_job")
+ self.assertEqual(resource.labels["project_id"], self.PROJECT)
+ self.assertEqual(resource.labels["job_name"], self.NAME)
+ self.assertEqual(resource.labels["location"], self.LOCATION)
+
def test_app_engine_resource(self):
patch = mock.patch(
"google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
@@ -214,7 +224,8 @@ def test_with_no_project_from_server(self):
resource_fns = [
_global_resource_patched,
_create_app_engine_resource,
- _create_cloud_run_resource,
+ _create_cloud_run_service_resource,
+ _create_cloud_run_job_resource,
_create_compute_resource,
_create_kubernetes_resource,
_create_functions_resource,
@@ -225,7 +236,6 @@ def test_with_no_project_from_server(self):
class Test_Resource_Detection(unittest.TestCase):
-
PROJECT = "test-project"
def _mock_k8s_metadata(self, endpoint):
@@ -285,13 +295,20 @@ def test_detect_legacy_functions(self):
self.assertIsInstance(resource, Resource)
self.assertEqual(resource.type, "cloud_function")
- def test_detect_cloud_run(self):
- for env in _monitored_resources._CLOUD_RUN_ENV_VARS:
+ def test_detect_cloud_run_service(self):
+ for env in _monitored_resources._CLOUD_RUN_SERVICE_ENV_VARS:
os.environ[env] = "TRUE"
resource = detect_resource(self.PROJECT)
self.assertIsInstance(resource, Resource)
self.assertEqual(resource.type, "cloud_run_revision")
+ def test_detect_cloud_run_job(self):
+ for env in _monitored_resources._CLOUD_RUN_JOB_ENV_VARS:
+ os.environ[env] = "TRUE"
+ resource = detect_resource(self.PROJECT)
+ self.assertIsInstance(resource, Resource)
+ self.assertEqual(resource.type, "cloud_run_job")
+
def test_detect_compute_engine(self):
patch = mock.patch(
"google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
@@ -327,3 +344,45 @@ def test_detect_partial_data(self):
# project id not returned from metadata serve
# should be empty string
self.assertEqual(resource.labels["project_id"], "")
+
+
+@pytest.mark.parametrize(
+ "resource_type,os_environ,record_attrs,expected_labels",
+ [
+ (
+ _monitored_resources._GAE_RESOURCE_TYPE,
+ {},
+ {"_trace": "trace_id"},
+ {_monitored_resources._GAE_TRACE_ID_LABEL: "trace_id"},
+ ),
+ (
+ _monitored_resources._CLOUD_RUN_JOB_RESOURCE_TYPE,
+ {
+ _monitored_resources._CLOUD_RUN_EXECUTION_ID: "test_job_12345",
+ _monitored_resources._CLOUD_RUN_TASK_INDEX: "1",
+ _monitored_resources._CLOUD_RUN_TASK_ATTEMPT: "12",
+ },
+ {},
+ {
+ _monitored_resources._CLOUD_RUN_JOBS_EXECUTION_NAME_LABEL: "test_job_12345",
+ _monitored_resources._CLOUD_RUN_JOBS_TASK_INDEX_LABEL: "1",
+ _monitored_resources._CLOUD_RUN_JOBS_TASK_ATTEMPT_LABEL: "12",
+ },
+ ),
+ ("global", {}, {}, {}),
+ ],
+)
+def test_add_resource_labels(resource_type, os_environ, record_attrs, expected_labels):
+ os.environ.clear()
+ record = logging.LogRecord("logname", None, None, None, "test", None, None)
+
+ resource = Resource(type=resource_type, labels={})
+
+ for attr, val in record_attrs.items():
+ setattr(record, attr, val)
+
+ os.environ.update(os_environ)
+
+ labels = add_resource_labels(resource, record)
+
+ assert expected_labels == labels
diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py
index 8eedfad9b..38d607e99 100644
--- a/tests/unit/handlers/test_app_engine.py
+++ b/tests/unit/handlers/test_app_engine.py
@@ -13,6 +13,7 @@
# limitations under the License.
import logging
+import pytest
import unittest
import mock
@@ -46,6 +47,9 @@ def test_constructor_w_gae_standard_env(self):
), mock.patch(
"google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
return_value=self.PROJECT,
+ ), pytest.warns(
+ DeprecationWarning,
+ match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead",
):
handler = self._make_one(client, transport=_Transport)
@@ -78,6 +82,9 @@ def test_constructor_w_gae_flex_env(self):
), mock.patch(
"google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server",
return_value=self.PROJECT,
+ ), pytest.warns(
+ DeprecationWarning,
+ match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead",
):
handler = self._make_one(
client, name=name, transport=_Transport, stream=stream
@@ -99,7 +106,10 @@ def test_emit(self):
"google.cloud.logging_v2.handlers.app_engine.get_request_data",
return_value=(expected_http_request, trace_id, None, None),
)
- with get_request_patch:
+ with get_request_patch, pytest.warns(
+ DeprecationWarning,
+ match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead",
+ ):
# library integrations mocked to return test data
client = mock.Mock(project=self.PROJECT, spec=["project"])
handler = self._make_one(client, transport=_Transport)
@@ -137,7 +147,10 @@ def test_emit_manual_field_override(self):
"google.cloud.logging_v2.handlers.app_engine.get_request_data",
return_value=(inferred_http_request, inferred_trace_id, None, None),
)
- with get_request_patch:
+ with get_request_patch, pytest.warns(
+ DeprecationWarning,
+ match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead",
+ ):
# library integrations mocked to return test data
client = mock.Mock(project=self.PROJECT, spec=["project"])
handler = self._make_one(client, transport=_Transport)
@@ -153,7 +166,7 @@ def test_emit_manual_field_override(self):
setattr(record, "trace", expected_trace)
expected_span = "456"
setattr(record, "span_id", expected_span)
- expected_http = {"reuqest_url": "manual"}
+ expected_http = {"request_url": "manual"}
setattr(record, "http_request", expected_http)
expected_resource = Resource(type="test", labels={})
setattr(record, "resource", expected_resource)
@@ -197,12 +210,20 @@ def test_get_gae_labels_with_label(self):
from google.cloud.logging_v2.handlers import app_engine
trace_id = "test-gae-trace-id"
- gae_labels = self._get_gae_labels_helper(trace_id)
+ with pytest.warns(
+ DeprecationWarning,
+ match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead",
+ ):
+ gae_labels = self._get_gae_labels_helper(trace_id)
expected_labels = {app_engine._TRACE_ID_LABEL: trace_id}
self.assertEqual(gae_labels, expected_labels)
def test_get_gae_labels_without_label(self):
- gae_labels = self._get_gae_labels_helper(None)
+ with pytest.warns(
+ DeprecationWarning,
+ match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead",
+ ):
+ gae_labels = self._get_gae_labels_helper(None)
self.assertEqual(gae_labels, {})
diff --git a/tests/unit/handlers/test_container_engine.py b/tests/unit/handlers/test_container_engine.py
index 280ab9cf0..5c814c53d 100644
--- a/tests/unit/handlers/test_container_engine.py
+++ b/tests/unit/handlers/test_container_engine.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import pytest
import unittest
@@ -27,18 +28,30 @@ def _make_one(self, *args, **kw):
return self._get_target_class()(*args, **kw)
def test_ctor_defaults(self):
- handler = self._make_one()
+ with pytest.warns(
+ DeprecationWarning,
+ match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead",
+ ):
+ handler = self._make_one()
self.assertIsNone(handler.name)
def test_ctor_w_name(self):
- handler = self._make_one(name="foo")
+ with pytest.warns(
+ DeprecationWarning,
+ match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead",
+ ):
+ handler = self._make_one(name="foo")
self.assertEqual(handler.name, "foo")
def test_format(self):
import logging
import json
- handler = self._make_one()
+ with pytest.warns(
+ DeprecationWarning,
+ match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead",
+ ):
+ handler = self._make_one()
logname = "loggername"
message = "hello world,嗨 世界"
record = logging.LogRecord(
@@ -51,6 +64,10 @@ def test_format(self):
"thread": record.thread,
"severity": record.levelname,
}
- payload = handler.format(record)
+ with pytest.warns(
+ DeprecationWarning,
+ match="format_stackdriver_json is deprecated. Use StructuredLogHandler instead",
+ ):
+ payload = handler.format(record)
self.assertEqual(payload, json.dumps(expected_payload, ensure_ascii=False))
diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py
index 1e431f1aa..3f25929e2 100644
--- a/tests/unit/handlers/test_handlers.py
+++ b/tests/unit/handlers/test_handlers.py
@@ -18,14 +18,25 @@
import mock
import json
+from google.cloud.logging_v2.handlers.handlers import (
+ _INTERNAL_LOGGERS,
+ EXCLUDED_LOGGER_DEFAULTS,
+)
+
from google.cloud.logging_v2.handlers._monitored_resources import (
_FUNCTION_ENV_VARS,
_GAE_ENV_VARS,
)
+from tests.unit.handlers import (
+ _setup_otel_span_context,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+)
-class TestCloudLoggingFilter(unittest.TestCase):
+class TestCloudLoggingFilter(unittest.TestCase):
PROJECT = "PROJECT"
@staticmethod
@@ -129,7 +140,7 @@ def test_minimal_record(self):
self.assertIsNone(record._labels)
self.assertEqual(record._labels_str, "{}")
- def test_record_with_request(self):
+ def test_record_with_xctc_request(self):
"""
test filter adds http request data when available
"""
@@ -150,8 +161,9 @@ def test_record_with_request(self):
expected_path = "http://testserver/123"
expected_agent = "Mozilla/5.0"
expected_trace = "123"
- expected_span = "456"
- combined_trace = f"{expected_trace}/{expected_span};o=1"
+ input_span = "456"
+ expected_span = "1c8".zfill(16)
+ combined_trace = f"{expected_trace}/{input_span};o=1"
expected_request = {
"requestMethod": "GET",
"requestUrl": expected_path,
@@ -226,6 +238,136 @@ def test_record_with_traceparent_request(self):
self.assertEqual(record._http_request, expected_request)
self.assertEqual(record._http_request_str, json.dumps(expected_request))
+ def test_record_with_opentelemetry_span_no_request(self):
+ filter_obj = self._make_one()
+ record = logging.LogRecord(
+ None,
+ logging.INFO,
+ None,
+ None,
+ None,
+ None,
+ None,
+ )
+ record.created = None
+
+ with _setup_otel_span_context():
+ success = filter_obj.filter(record)
+ self.assertTrue(success)
+
+ self.assertEqual(record._trace, _EXPECTED_OTEL_TRACE_ID)
+ self.assertEqual(record._trace_str, _EXPECTED_OTEL_TRACE_ID)
+ self.assertEqual(record._span_id, _EXPECTED_OTEL_SPAN_ID)
+ self.assertEqual(record._span_id_str, _EXPECTED_OTEL_SPAN_ID)
+ self.assertEqual(record._trace_sampled, _EXPECTED_OTEL_TRACESAMPLED)
+ self.assertEqual(record._trace_sampled_str, "true")
+ self.assertIsNone(record._http_request)
+ self.assertEqual(record._http_request_str, "{}")
+
+ def test_record_with_opentelemetry_span_and_request(self):
+ filter_obj = self._make_one()
+ record = logging.LogRecord(
+ None,
+ logging.INFO,
+ None,
+ None,
+ None,
+ None,
+ None,
+ )
+ record.created = None
+ http_path = "http://testserver/123"
+ http_agent = "Mozilla/5.0"
+ http_trace = "123"
+ http_span = "456"
+ combined_trace = f"{http_trace}/{http_span};o=1"
+ expected_request = {
+ "requestMethod": "GET",
+ "requestUrl": http_path,
+ "userAgent": http_agent,
+ "protocol": "HTTP/1.1",
+ }
+
+ app = self.create_app()
+ with app.test_request_context(
+ http_path,
+ headers={
+ "User-Agent": http_agent,
+ "X_CLOUD_TRACE_CONTEXT": combined_trace,
+ },
+ ):
+ with _setup_otel_span_context():
+ success = filter_obj.filter(record)
+ self.assertTrue(success)
+
+ self.assertEqual(record._trace, _EXPECTED_OTEL_TRACE_ID)
+ self.assertEqual(record._trace_str, _EXPECTED_OTEL_TRACE_ID)
+ self.assertEqual(record._span_id, _EXPECTED_OTEL_SPAN_ID)
+ self.assertEqual(record._span_id_str, _EXPECTED_OTEL_SPAN_ID)
+ self.assertEqual(record._trace_sampled, _EXPECTED_OTEL_TRACESAMPLED)
+ self.assertEqual(record._trace_sampled_str, "true")
+
+ self.assertEqual(record._http_request, expected_request)
+ self.assertEqual(record._http_request_str, json.dumps(expected_request))
+
+ def test_record_with_opentelemetry_span_and_request_with_overrides(self):
+ """
+ sort of does what the test after this one does, but more in the context of OTel precedence
+ """
+ filter_obj = self._make_one()
+ record = logging.LogRecord(
+ None,
+ logging.INFO,
+ None,
+ None,
+ None,
+ None,
+ None,
+ )
+ record.created = None
+ http_path = "http://testserver/123"
+ http_agent = "Mozilla/5.0"
+ http_trace = "123"
+ http_span = "456"
+ combined_trace = f"{http_trace}/{http_span};o=1"
+ expected_request = {
+ "requestMethod": "GET",
+ "requestUrl": http_path,
+ "userAgent": http_agent,
+ "protocol": "HTTP/1.1",
+ }
+
+ overwritten_trace = "01234"
+ overwritten_span = "43210"
+ overwritten_tracesampled = False
+ record.trace = overwritten_trace
+ record.span_id = overwritten_span
+ record.trace_sampled = overwritten_tracesampled
+
+ app = self.create_app()
+ with app.test_request_context(
+ http_path,
+ headers={
+ "User-Agent": http_agent,
+ "X_CLOUD_TRACE_CONTEXT": combined_trace,
+ },
+ ):
+ with _setup_otel_span_context():
+ success = filter_obj.filter(record)
+ self.assertTrue(success)
+
+ self.assertEqual(record._trace, overwritten_trace)
+ self.assertEqual(record._trace_str, overwritten_trace)
+ self.assertEqual(record._span_id, overwritten_span)
+ self.assertEqual(record._span_id_str, overwritten_span)
+ self.assertFalse(record._trace_sampled)
+ self.assertEqual(
+ record._trace_sampled_str, json.dumps(overwritten_tracesampled)
+ )
+
+ self.assertEqual(record._http_request, expected_request)
+ self.assertEqual(record._http_request_str, json.dumps(expected_request))
+
def test_user_overrides(self):
"""
ensure user can override fields
@@ -291,7 +433,6 @@ def test_user_overrides(self):
class TestCloudLoggingHandler(unittest.TestCase):
-
PROJECT = "PROJECT"
@staticmethod
@@ -320,6 +461,7 @@ def test_ctor_defaults(self):
self.assertEqual(handler.name, DEFAULT_LOGGER_NAME)
self.assertIs(handler.client, client)
self.assertIsInstance(handler.transport, _Transport)
+ self.assertTrue(handler._transport_open)
self.assertIs(handler.transport.client, client)
self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME)
global_resource = _create_global_resource(self.PROJECT)
@@ -327,6 +469,17 @@ def test_ctor_defaults(self):
self.assertIsNone(handler.labels)
self.assertIs(handler.stream, sys.stderr)
+ def test_add_handler_to_client_handlers(self):
+ from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+
+ client = _Client(self.PROJECT)
+ handler = self._make_one(
+ client,
+ transport=_Transport,
+ resource=_GLOBAL_RESOURCE,
+ )
+ self.assertIn(handler, client._handlers)
+
def test_ctor_explicit(self):
import io
from google.cloud.logging import Resource
@@ -432,7 +585,7 @@ def test_emit_manual_field_override(self):
setattr(record, "span_id", expected_span)
expected_sampled = True
setattr(record, "trace_sampled", expected_sampled)
- expected_http = {"reuqest_url": "manual"}
+ expected_http = {"request_url": "manual"}
setattr(record, "http_request", expected_http)
expected_source = {"file": "test-file"}
setattr(record, "source_location", expected_source)
@@ -649,6 +802,56 @@ def test_emit_with_encoded_json(self):
),
)
+ def test_emit_after_close(self):
+ from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+
+ client = _Client(self.PROJECT)
+ handler = self._make_one(
+ client, transport=_Transport, resource=_GLOBAL_RESOURCE
+ )
+ logname = "loggername"
+ message = "hello world"
+ record = logging.LogRecord(
+ logname, logging.INFO, None, None, message, None, None
+ )
+ handler.handle(record)
+ old_transport = handler.transport
+ self.assertEqual(
+ handler.transport.send_called_with,
+ (
+ record,
+ message,
+ _GLOBAL_RESOURCE,
+ {"python_logger": logname},
+ None,
+ None,
+ False,
+ None,
+ None,
+ ),
+ )
+
+ handler.close()
+ self.assertFalse(handler._transport_open)
+
+ handler.handle(record)
+ self.assertTrue(handler._transport_open)
+ self.assertNotEqual(handler.transport, old_transport)
+ self.assertEqual(
+ handler.transport.send_called_with,
+ (
+ record,
+ message,
+ _GLOBAL_RESOURCE,
+ {"python_logger": logname},
+ None,
+ None,
+ False,
+ None,
+ None,
+ ),
+ )
+
def test_format_with_arguments(self):
"""
Handler should support format string arguments
@@ -684,6 +887,24 @@ def test_format_with_arguments(self):
),
)
+ def test_close(self):
+ from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+
+ client = _Client(self.PROJECT)
+ handler = self._make_one(
+ client,
+ transport=_Transport,
+ resource=_GLOBAL_RESOURCE,
+ )
+ old_transport = handler.transport
+ handler.close()
+ self.assertFalse(handler._transport_open)
+ self.assertTrue(old_transport.close_called)
+
+ # second call to close shouldn't throw an exception
+ handler.close()
+ self.assertFalse(handler._transport_open)
+
class TestFormatAndParseMessage(unittest.TestCase):
def test_none(self):
@@ -859,7 +1080,7 @@ def test_json_fields_input_unmodified(self):
_format_and_parse_message(record, handler)
# ensure json_fields has no side-effects
self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys()))
- for (key, value) in json_fields_orig.items():
+ for key, value in json_fields_orig.items():
self.assertEqual(
value, json_fields[key], f"expected_payload[{key}] != result[{key}]"
)
@@ -869,7 +1090,7 @@ class TestSetupLogging(unittest.TestCase):
def _call_fut(self, handler, excludes=None):
from google.cloud.logging.handlers import setup_logging
- if excludes:
+ if excludes is not None:
return setup_logging(handler, excluded_loggers=excludes)
else:
return setup_logging(handler)
@@ -895,6 +1116,24 @@ def test_setup_logging_excludes(self):
self.assertNotIn(handler, excluded_logger.handlers)
self.assertFalse(excluded_logger.propagate)
+ def test_setup_logging_internal_loggers_no_excludes(self):
+ handler = _Handler(logging.INFO)
+ self._call_fut(handler, excludes=())
+
+ # Test that excluded logger defaults can be included, but internal
+ # loggers can't be.
+ for logger_name in _INTERNAL_LOGGERS:
+ logger = logging.getLogger(logger_name)
+ self.assertNotIn(handler, logger.handlers)
+ self.assertFalse(logger.propagate)
+
+ logger = logging.getLogger("logging")
+ self.assertTrue(logger.propagate)
+
+ for logger_name in EXCLUDED_LOGGER_DEFAULTS:
+ logger = logging.getLogger(logger_name)
+ self.assertTrue(logger.propagate)
+
@patch.dict("os.environ", {envar: "1" for envar in _FUNCTION_ENV_VARS})
def test_remove_handlers_gcf(self):
logger = logging.getLogger()
@@ -941,10 +1180,18 @@ def test_keep_handlers_others(self):
def setUp(self):
self._handlers_cache = logging.getLogger().handlers[:]
+ # reset the logging manager every time so that we're not reusing loggers
+ # across different test cases.
+ self._logger_manager = logging.Logger.manager
+ logging.Logger.manager = logging.Manager(logging.Logger.root)
+
def tearDown(self):
# cleanup handlers
logging.getLogger().handlers = self._handlers_cache[:]
+ # restore the old logging manager.
+ logging.Logger.manager = self._logger_manager
+
class _Handler(object):
def __init__(self, level):
@@ -960,12 +1207,14 @@ def release(self):
class _Client(object):
def __init__(self, project):
self.project = project
+ self._handlers = set()
class _Transport(object):
def __init__(self, client, name, resource=None):
self.client = client
self.name = name
+ self.close_called = False
def send(
self,
@@ -990,3 +1239,6 @@ def send(
http_request,
source_location,
)
+
+ def close(self):
+ self.close_called = True
diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py
index 353530ed1..908758749 100644
--- a/tests/unit/handlers/test_structured_log.py
+++ b/tests/unit/handlers/test_structured_log.py
@@ -86,7 +86,7 @@ def test_format(self):
}
handler.filter(record)
result = json.loads(handler.format(record))
- for (key, value) in expected_payload.items():
+ for key, value in expected_payload.items():
self.assertEqual(value, result[key])
self.assertEqual(
len(expected_payload.keys()),
@@ -121,7 +121,7 @@ def test_format_minimal(self):
handler.filter(record)
result = json.loads(handler.format(record))
self.assertEqual(set(expected_payload.keys()), set(result.keys()))
- for (key, value) in expected_payload.items():
+ for key, value in expected_payload.items():
self.assertEqual(
value, result[key], f"expected_payload[{key}] != result[{key}]"
)
@@ -304,7 +304,7 @@ def test_format_with_reserved_json_field(self):
handler.filter(record)
result = json.loads(handler.format(record))
self.assertEqual(set(expected_payload.keys()), set(result.keys()))
- for (key, value) in expected_payload.items():
+ for key, value in expected_payload.items():
self.assertEqual(
value, result[key], f"expected_payload[{key}] != result[{key}]"
)
@@ -382,7 +382,7 @@ def test_format_with_arguments(self):
result = handler.format(record)
self.assertIn(expected_result, result)
- def test_format_with_request(self):
+ def test_format_with_xctc_request(self):
import logging
import json
@@ -393,8 +393,9 @@ def test_format_with_request(self):
expected_path = "http://testserver/123"
expected_agent = "Mozilla/5.0"
expected_trace = "123"
- expected_span = "456"
- trace_header = f"{expected_trace}/{expected_span};o=1"
+ input_span = "456"
+ expected_span = "1c8".zfill(16)
+ trace_header = f"{expected_trace}/{input_span};o=1"
expected_payload = {
"logging.googleapis.com/trace": expected_trace,
"logging.googleapis.com/spanId": expected_span,
@@ -417,7 +418,7 @@ def test_format_with_request(self):
):
handler.filter(record)
result = json.loads(handler.format(record))
- for (key, value) in expected_payload.items():
+ for key, value in expected_payload.items():
self.assertEqual(value, result[key])
def test_format_with_traceparent(self):
@@ -452,14 +453,14 @@ def test_format_with_traceparent(self):
):
handler.filter(record)
result = json.loads(handler.format(record))
- for (key, value) in expected_payload.items():
+ for key, value in expected_payload.items():
self.assertEqual(value, result[key])
def test_format_overrides(self):
"""
Allow users to override log fields using `logging.info("", extra={})`
- If supported fields were overriden by the user, those choices should
+ If supported fields were overridden by the user, those choices should
take precedence.
"""
import logging
@@ -509,7 +510,108 @@ def test_format_overrides(self):
)
handler.filter(record)
result = json.loads(handler.format(record))
- for (key, value) in expected_payload.items():
+ for key, value in expected_payload.items():
+ self.assertEqual(value, result[key])
+
+ def test_format_with_opentelemetry_span(self):
+ import logging
+ import json
+
+ from tests.unit.handlers import (
+ _setup_otel_span_context,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+ )
+
+ handler = self._make_one()
+ logname = "loggername"
+ message = "hello world,嗨 世界"
+ record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None)
+ expected_payload = {
+ "logging.googleapis.com/trace": _EXPECTED_OTEL_TRACE_ID,
+ "logging.googleapis.com/spanId": _EXPECTED_OTEL_SPAN_ID,
+ "logging.googleapis.com/trace_sampled": _EXPECTED_OTEL_TRACESAMPLED,
+ }
+
+ with _setup_otel_span_context():
+ handler.filter(record)
+ result = json.loads(handler.format(record))
+ for key, value in expected_payload.items():
+ self.assertEqual(value, result[key])
+
+ def test_format_with_opentelemetry_span_and_request(self):
+ import logging
+ import json
+
+ from tests.unit.handlers import (
+ _setup_otel_span_context,
+ _EXPECTED_OTEL_TRACE_ID,
+ _EXPECTED_OTEL_SPAN_ID,
+ _EXPECTED_OTEL_TRACESAMPLED,
+ )
+
+ handler = self._make_one()
+ logname = "loggername"
+ message = "hello world,嗨 世界"
+ record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None)
+ expected_path = "http://testserver/123"
+ expected_agent = "Mozilla/5.0"
+ http_trace = "123"
+ http_span = "456"
+ trace_header = f"{http_trace}/{http_span};o=1"
+ expected_payload = {
+ "logging.googleapis.com/trace": _EXPECTED_OTEL_TRACE_ID,
+ "logging.googleapis.com/spanId": _EXPECTED_OTEL_SPAN_ID,
+ "logging.googleapis.com/trace_sampled": _EXPECTED_OTEL_TRACESAMPLED,
+ "httpRequest": {
+ "requestMethod": "GET",
+ "requestUrl": expected_path,
+ "userAgent": expected_agent,
+ "protocol": "HTTP/1.1",
+ },
+ }
+
+ app = self.create_app()
+ with app.test_request_context(
+ expected_path,
+ headers={
+ "User-Agent": expected_agent,
+ "X_CLOUD_TRACE_CONTEXT": trace_header,
+ },
+ ):
+ with _setup_otel_span_context():
+ handler.filter(record)
+ result = json.loads(handler.format(record))
+ for key, value in expected_payload.items():
+ self.assertEqual(value, result[key])
+
+ def test_format_with_opentelemetry_span_and_overrides(self):
+ import logging
+ import json
+
+ from tests.unit.handlers import _setup_otel_span_context
+
+ handler = self._make_one()
+ logname = "loggername"
+ message = "hello world,嗨 世界"
+ record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None)
+ overwrite_trace = "abc"
+ overwrite_span = "123"
+ overwrite_tracesampled = False
+ record.trace = overwrite_trace
+ record.span_id = overwrite_span
+ record.trace_sampled = overwrite_tracesampled
+ expected_payload = {
+ "logging.googleapis.com/trace": overwrite_trace,
+ "logging.googleapis.com/spanId": overwrite_span,
+ "logging.googleapis.com/trace_sampled": overwrite_tracesampled,
+ }
+
+ with _setup_otel_span_context():
+ handler.filter(record)
+ result = json.loads(handler.format(record))
+ for key, value in expected_payload.items():
self.assertEqual(value, result[key])
def test_format_with_json_fields(self):
@@ -590,7 +692,7 @@ def test_json_fields_input_unmodified(self):
handler.format(record)
# ensure json_fields has no side-effects
self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys()))
- for (key, value) in json_fields_orig.items():
+ for key, value in json_fields_orig.items():
self.assertEqual(
value, json_fields[key], f"expected_payload[{key}] != result[{key}]"
)
diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py
index d4954ff7b..9fdccb172 100644
--- a/tests/unit/handlers/transports/test_background_thread.py
+++ b/tests/unit/handlers/transports/test_background_thread.py
@@ -12,13 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import contextlib
import time
import logging
import queue
+import re
import unittest
import mock
+from io import StringIO
+
class TestBackgroundThreadHandler(unittest.TestCase):
PROJECT = "PROJECT"
@@ -176,6 +180,11 @@ def test_worker(self):
class Test_Worker(unittest.TestCase):
NAME = "python_logger"
+ def setUp(self):
+ import sys
+
+ print("In method", self._testMethodName, file=sys.stderr)
+
@staticmethod
def _get_target_class():
from google.cloud.logging_v2.handlers.transports import background_thread
@@ -187,9 +196,26 @@ def _make_one(self, *args, **kw):
def _start_with_thread_patch(self, worker):
with mock.patch("threading.Thread", new=_Thread) as thread_mock:
- with mock.patch("atexit.register") as atexit_mock:
- worker.start()
- return thread_mock, atexit_mock
+ worker.start()
+ return thread_mock
+
+ @staticmethod
+ @contextlib.contextmanager
+ def _init_atexit_mock():
+ atexit_mock = _AtexitMock()
+ with mock.patch.multiple(
+ "atexit", register=atexit_mock.register, unregister=atexit_mock.unregister
+ ):
+ yield atexit_mock
+
+ @staticmethod
+ @contextlib.contextmanager
+ def _init_main_thread_is_alive_mock(is_alive):
+ with mock.patch("threading.main_thread") as main_thread_func_mock:
+ main_thread_obj_mock = mock.Mock()
+ main_thread_func_mock.return_value = main_thread_obj_mock
+ main_thread_obj_mock.is_alive = mock.Mock(return_value=is_alive)
+ yield
def test_constructor(self):
logger = _Logger(self.NAME)
@@ -216,14 +242,15 @@ def test_start(self):
worker = self._make_one(_Logger(self.NAME))
- _, atexit_mock = self._start_with_thread_patch(worker)
+ with self._init_atexit_mock() as atexit_mock:
+ self._start_with_thread_patch(worker)
self.assertTrue(worker.is_alive)
self.assertIsNotNone(worker._thread)
self.assertTrue(worker._thread.daemon)
self.assertEqual(worker._thread._target, worker._thread_main)
self.assertEqual(worker._thread._name, background_thread._WORKER_THREAD_NAME)
- atexit_mock.assert_called_once_with(worker._main_thread_terminated)
+ self.assertIn(worker._handle_exit, atexit_mock.registered_funcs)
# Calling start again should not start a new thread.
current_thread = worker._thread
@@ -260,29 +287,33 @@ def test_stop_no_grace(self):
self.assertEqual(thread._timeout, None)
- def test__main_thread_terminated(self):
+ def test__close(self):
worker = self._make_one(_Logger(self.NAME))
self._start_with_thread_patch(worker)
- worker._main_thread_terminated()
+ worker._close("")
self.assertFalse(worker.is_alive)
# Calling twice should not be an error
- worker._main_thread_terminated()
+ worker._close("")
- def test__main_thread_terminated_non_empty_queue(self):
+ def test__close_non_empty_queue(self):
worker = self._make_one(_Logger(self.NAME))
+ msg = "My Message"
self._start_with_thread_patch(worker)
record = mock.Mock()
record.created = time.time()
worker.enqueue(record, "")
- worker._main_thread_terminated()
+
+ with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock:
+ worker._close(msg)
+ self.assertIn(msg, stderr_mock.getvalue())
self.assertFalse(worker.is_alive)
- def test__main_thread_terminated_did_not_join(self):
+ def test__close_did_not_join(self):
worker = self._make_one(_Logger(self.NAME))
self._start_with_thread_patch(worker)
@@ -290,7 +321,65 @@ def test__main_thread_terminated_did_not_join(self):
record = mock.Mock()
record.created = time.time()
worker.enqueue(record, "")
- worker._main_thread_terminated()
+ worker._close("")
+
+ self.assertFalse(worker.is_alive)
+
+ def test__handle_exit(self):
+ from google.cloud.logging_v2.handlers.transports.background_thread import (
+ _CLOSE_THREAD_SHUTDOWN_ERROR_MSG,
+ )
+
+ worker = self._make_one(_Logger(self.NAME))
+
+ with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock:
+ with self._init_main_thread_is_alive_mock(False):
+ with self._init_atexit_mock():
+ self._start_with_thread_patch(worker)
+ self._enqueue_record(worker, "test")
+ worker._handle_exit()
+
+ self.assertRegex(
+ stderr_mock.getvalue(),
+ re.compile("^%s$" % _CLOSE_THREAD_SHUTDOWN_ERROR_MSG, re.MULTILINE),
+ )
+
+ self.assertRegex(
+ stderr_mock.getvalue(),
+ re.compile(
+ r"^Failed to send %d pending logs\.$" % worker._queue.qsize(),
+ re.MULTILINE,
+ ),
+ )
+
+ def test__handle_exit_no_items(self):
+ worker = self._make_one(_Logger(self.NAME))
+
+ with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock:
+ with self._init_main_thread_is_alive_mock(False):
+ with self._init_atexit_mock():
+ self._start_with_thread_patch(worker)
+ worker._handle_exit()
+
+ self.assertEqual(stderr_mock.getvalue(), "")
+
+ def test_close_unregister_atexit(self):
+ worker = self._make_one(_Logger(self.NAME))
+
+ with mock.patch("sys.stderr", new_callable=StringIO) as stderr_mock:
+ with self._init_atexit_mock() as atexit_mock:
+ self._start_with_thread_patch(worker)
+ self.assertIn(worker._handle_exit, atexit_mock.registered_funcs)
+ worker.close()
+ self.assertNotIn(worker._handle_exit, atexit_mock.registered_funcs)
+
+ self.assertNotRegex(
+ stderr_mock.getvalue(),
+ re.compile(
+ r"^Failed to send %d pending logs\.$" % worker._queue.qsize(),
+ re.MULTILINE,
+ ),
+ )
self.assertFalse(worker.is_alive)
@@ -402,6 +491,23 @@ def test__thread_main_batches(self):
self.assertFalse(worker._cloud_logger._batch.commit_called)
self.assertEqual(worker._queue.qsize(), 0)
+ def test__thread_main_main_thread_terminated(self):
+ from google.cloud.logging_v2.handlers.transports import background_thread
+
+ worker = self._make_one(_Logger(self.NAME))
+ self._enqueue_record(worker, "1")
+ worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)
+
+ with mock.patch("threading.main_thread") as main_thread_func_mock:
+ main_thread_obj_mock = mock.Mock()
+ main_thread_func_mock.return_value = main_thread_obj_mock
+ main_thread_obj_mock.is_alive = mock.Mock(return_value=False)
+ self._enqueue_record(worker, "1")
+ self._enqueue_record(worker, "2")
+ worker._thread_main()
+
+ self.assertFalse(worker._cloud_logger._batch.commit_called)
+
@mock.patch("time.time", autospec=True, return_value=1)
def test__thread_main_max_latency(self, time):
# Note: this test is a bit brittle as it assumes the operation of
@@ -565,3 +671,16 @@ def __init__(self, project, _http=None, credentials=None):
def logger(self, name, resource=None): # pylint: disable=unused-argument
self._logger = _Logger(name, resource=resource)
return self._logger
+
+
+class _AtexitMock(object):
+ """_AtexitMock is a simulation of registering/unregistering functions in atexit using a dummy set."""
+
+ def __init__(self):
+ self.registered_funcs = set()
+
+ def register(self, func):
+ self.registered_funcs.add(func)
+
+ def unregister(self, func):
+ self.registered_funcs.remove(func)
diff --git a/tests/unit/handlers/transports/test_base.py b/tests/unit/handlers/transports/test_base.py
index 71ef1366a..b723db87b 100644
--- a/tests/unit/handlers/transports/test_base.py
+++ b/tests/unit/handlers/transports/test_base.py
@@ -16,7 +16,6 @@
class TestBaseHandler(unittest.TestCase):
-
PROJECT = "PROJECT"
@staticmethod
@@ -39,3 +38,7 @@ def test_resource_is_valid_argunent(self):
def test_flush_is_abstract_and_optional(self):
target = self._make_one("client", "name")
target.flush()
+
+ def test_close_is_abstract_and_optional(self):
+ target = self._make_one("client", "name")
+ target.close()
diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py
index 752a96d9f..01a949d24 100644
--- a/tests/unit/handlers/transports/test_sync.py
+++ b/tests/unit/handlers/transports/test_sync.py
@@ -17,7 +17,6 @@
class TestSyncHandler(unittest.TestCase):
-
PROJECT = "PROJECT"
@staticmethod
diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py
index 8bf25870a..58e230129 100644
--- a/tests/unit/test__gapic.py
+++ b/tests/unit/test__gapic.py
@@ -17,6 +17,8 @@
import google.auth.credentials
import mock
+from datetime import datetime
+
import google.cloud.logging
from google.cloud import logging_v2
from google.cloud.logging_v2 import _gapic
@@ -173,6 +175,21 @@ def test_write_entries_single(self):
assert request.entries[0].resource.type == entry["resource"]["type"]
assert request.entries[0].text_payload == "text"
+ def test_write_entries_parse_error(self):
+ client = self.make_logging_api()
+ with self.assertRaises(ValueError):
+ with mock.patch.object(
+ type(client._gapic_api.transport.write_log_entries), "__call__"
+ ) as call:
+ entry = {
+ "logName": self.LOG_PATH,
+ "resource": {"type": "global"},
+ "jsonPayload": {"time": datetime.now()},
+ }
+ client.write_entries([entry])
+
+ call.assert_not_called()
+
def test_logger_delete(self):
client = self.make_logging_api()
@@ -595,7 +612,6 @@ def test_non_registry_failure(self, msg_to_dict_mock):
msg_to_dict_mock.assert_called_once_with(
entry_pb,
preserving_proto_field_name=False,
- including_default_value_fields=False,
)
def test_unregistered_type(self):
diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py
index f9b60cfa6..5709a50a6 100644
--- a/tests/unit/test__http.py
+++ b/tests/unit/test__http.py
@@ -24,7 +24,6 @@ def _make_credentials():
class TestConnection(unittest.TestCase):
-
PROJECT = "project"
FILTER = "logName:syslog AND severity>=ERROR"
@@ -96,7 +95,6 @@ def test_extra_headers(self):
class Test_LoggingAPI(unittest.TestCase):
-
PROJECT = "project"
PROJECT_PATH = "projects/project"
LIST_ENTRIES_PATH = "entries:list"
@@ -124,9 +122,9 @@ def test_ctor(self):
@staticmethod
def _make_timestamp():
import datetime
- from google.cloud._helpers import UTC
+ from datetime import timezone
- NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
+ NOW = datetime.datetime.now(timezone.utc)
return NOW, _datetime_to_rfc3339_w_nanos(NOW)
def test_list_entries_with_limits(self):
@@ -354,7 +352,6 @@ def test_logger_delete(self):
class Test_SinksAPI(unittest.TestCase):
-
PROJECT = "project"
PROJECT_PATH = "projects/project"
FILTER = "logName:syslog AND severity>=ERROR"
@@ -636,7 +633,6 @@ def test_sink_delete_hit(self):
class Test_MetricsAPI(unittest.TestCase):
-
PROJECT = "project"
FILTER = "logName:syslog AND severity>=ERROR"
LIST_METRICS_PATH = "projects/%s/metrics" % (PROJECT,)
@@ -865,7 +861,6 @@ def test_metric_delete_hit(self):
class _Connection(object):
-
_called_with = None
_raise_conflict = False
diff --git a/tests/unit/test__instrumentation.py b/tests/unit/test__instrumentation.py
index dc330b0ca..97473ee61 100644
--- a/tests/unit/test__instrumentation.py
+++ b/tests/unit/test__instrumentation.py
@@ -17,7 +17,6 @@
class TestInstrumentation(unittest.TestCase):
-
TEST_NAME = "python"
# LONG_NAME > 14 characters
LONG_NAME = TEST_NAME + "789ABCDEF"
@@ -26,7 +25,7 @@ class TestInstrumentation(unittest.TestCase):
# LONG_VERSION > 16 characters
LONG_VERSION = TEST_VERSION + "6789ABCDEF12"
- def _get_diagonstic_value(self, entry, key):
+ def _get_diagnostic_value(self, entry, key):
return entry.payload[i._DIAGNOSTIC_INFO_KEY][i._INSTRUMENTATION_SOURCE_KEY][-1][
key
]
@@ -35,10 +34,10 @@ def test_default_diagnostic_info(self):
entry = i._create_diagnostic_entry()
self.assertEqual(
i._PYTHON_LIBRARY_NAME,
- self._get_diagonstic_value(entry, "name"),
+ self._get_diagnostic_value(entry, "name"),
)
self.assertEqual(
- i._LIBRARY_VERSION, self._get_diagonstic_value(entry, "version")
+ i._LIBRARY_VERSION, self._get_diagnostic_value(entry, "version")
)
def test_custom_diagnostic_info(self):
@@ -47,10 +46,10 @@ def test_custom_diagnostic_info(self):
)
self.assertEqual(
self.TEST_NAME,
- self._get_diagonstic_value(entry, "name"),
+ self._get_diagnostic_value(entry, "name"),
)
self.assertEqual(
- self.TEST_VERSION, self._get_diagonstic_value(entry, "version")
+ self.TEST_VERSION, self._get_diagnostic_value(entry, "version")
)
def test_truncate_long_values(self):
@@ -61,8 +60,8 @@ def test_truncate_long_values(self):
expected_name = self.LONG_NAME[: i._MAX_NAME_LENGTH] + "*"
expected_version = self.LONG_VERSION[: i._MAX_VERSION_LENGTH] + "*"
- self.assertEqual(expected_name, self._get_diagonstic_value(entry, "name"))
- self.assertEqual(expected_version, self._get_diagonstic_value(entry, "version"))
+ self.assertEqual(expected_name, self._get_diagnostic_value(entry, "name"))
+ self.assertEqual(expected_version, self._get_diagnostic_value(entry, "version"))
def test_drop_labels(self):
"""Labels should not be copied in instrumentation log"""
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 1c47a343b..6a9a7fd84 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -34,7 +34,6 @@ def _make_credentials():
class TestClient(unittest.TestCase):
-
PROJECT = "PROJECT"
PROJECT_PATH = f"projects/{PROJECT}"
LOGGER_NAME = "LOGGER_NAME"
@@ -843,14 +842,12 @@ def test_setup_logging(self):
(handler,) = args
self.assertIsInstance(handler, CloudLoggingHandler)
+ self.assertIn(handler, client._handlers)
handler.transport.worker.stop()
expected_kwargs = {
"excluded_loggers": (
- "google.cloud",
- "google.auth",
- "google_auth_httplib2",
"google.api_core.bidi",
"werkzeug",
),
@@ -886,14 +883,12 @@ def test_setup_logging_w_extra_kwargs(self):
self.assertEqual(handler.name, name)
self.assertEqual(handler.resource, resource)
self.assertEqual(handler.labels, labels)
+ self.assertIn(handler, client._handlers)
handler.transport.worker.stop()
expected_kwargs = {
"excluded_loggers": (
- "google.cloud",
- "google.auth",
- "google_auth_httplib2",
"google.api_core.bidi",
"werkzeug",
),
@@ -901,9 +896,206 @@ def test_setup_logging_w_extra_kwargs(self):
}
self.assertEqual(kwargs, expected_kwargs)
+ def test_setup_logging_w_extra_kwargs_structured_log(self):
+ import io
+ from google.cloud.logging.handlers import StructuredLogHandler
+ from google.cloud.logging import Resource
+ from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE
-class _Connection(object):
+ name = "test-logger"
+ resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"})
+ labels = {"handler_label": "value"}
+ stream = io.BytesIO()
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked:
+ client.setup_logging(
+ name=name, resource=resource, labels=labels, stream=stream
+ )
+
+ self.assertEqual(len(mocked.mock_calls), 1)
+ _, args, kwargs = mocked.mock_calls[0]
+
+ (handler,) = args
+ self.assertIsInstance(handler, StructuredLogHandler)
+
+ expected_kwargs = {
+ "excluded_loggers": (
+ "google.api_core.bidi",
+ "werkzeug",
+ ),
+ "log_level": 20,
+ }
+ self.assertEqual(kwargs, expected_kwargs)
+ self.assertIn(handler, client._handlers)
+
+ def test_flush_handlers_cloud_logging_handler(self):
+ import io
+ from google.cloud.logging.handlers import CloudLoggingHandler
+ from google.cloud.logging import Resource
+
+ name = "test-logger"
+ resource = Resource("resource_type", {"resource_label": "value"})
+ labels = {"handler_label": "value"}
+ stream = io.BytesIO()
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked:
+ client.setup_logging(
+ name=name, resource=resource, labels=labels, stream=stream
+ )
+
+ self.assertEqual(len(mocked.mock_calls), 1)
+ _, args, kwargs = mocked.mock_calls[0]
+
+ (handler,) = args
+ self.assertIsInstance(handler, CloudLoggingHandler)
+
+ handler.flush = mock.Mock()
+ client.flush_handlers()
+ handler.flush.assert_called_once_with()
+
+ def test_flush_handlers_cloud_logging_handler_no_setup_logging(self):
+ from google.cloud.logging.handlers import CloudLoggingHandler
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ handler = CloudLoggingHandler(client)
+ self.assertIn(handler, client._handlers)
+
+ handler.flush = mock.Mock()
+ client.flush_handlers()
+ handler.flush.assert_called_once_with()
+
+ def test_flush_handlers_structured_log(self):
+ import io
+ from google.cloud.logging.handlers import StructuredLogHandler
+ from google.cloud.logging import Resource
+ from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE
+
+ name = "test-logger"
+ resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"})
+ labels = {"handler_label": "value"}
+ stream = io.BytesIO()
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked:
+ client.setup_logging(
+ name=name, resource=resource, labels=labels, stream=stream
+ )
+
+ self.assertEqual(len(mocked.mock_calls), 1)
+ _, args, kwargs = mocked.mock_calls[0]
+
+ (handler,) = args
+ self.assertIsInstance(handler, StructuredLogHandler)
+
+ handler.flush = mock.Mock()
+ client.flush_handlers()
+ handler.flush.assert_called_once_with()
+
+ def test_close_cloud_logging_handler(self):
+ import contextlib
+ import io
+ from google.cloud.logging.handlers import CloudLoggingHandler
+ from google.cloud.logging import Resource
+
+ name = "test-logger"
+ resource = Resource("resource_type", {"resource_label": "value"})
+ labels = {"handler_label": "value"}
+ stream = io.BytesIO()
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked:
+ client.setup_logging(
+ name=name, resource=resource, labels=labels, stream=stream
+ )
+
+ self.assertEqual(len(mocked.mock_calls), 1)
+ _, args, kwargs = mocked.mock_calls[0]
+
+ (handler,) = args
+ self.assertIsInstance(handler, CloudLoggingHandler)
+
+ handler.close = mock.Mock()
+ with contextlib.closing(client):
+ pass
+ handler.close.assert_called_once_with()
+
+ def test_close_cloud_logging_handler_no_setup_logging(self):
+ import contextlib
+ from google.cloud.logging.handlers import CloudLoggingHandler
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ handler = CloudLoggingHandler(client)
+ self.assertIn(handler, client._handlers)
+
+ handler.close = mock.Mock()
+ with contextlib.closing(client):
+ pass
+
+ handler.close.assert_called_once_with()
+
+ def test_close_structured_log_handler(self):
+ import contextlib
+ import io
+ from google.cloud.logging.handlers import StructuredLogHandler
+ from google.cloud.logging import Resource
+ from google.cloud.logging_v2.client import _GKE_RESOURCE_TYPE
+
+ name = "test-logger"
+ resource = Resource(_GKE_RESOURCE_TYPE, {"resource_label": "value"})
+ labels = {"handler_label": "value"}
+ stream = io.BytesIO()
+
+ credentials = _make_credentials()
+ client = self._make_one(
+ project=self.PROJECT, credentials=credentials, _use_grpc=False
+ )
+
+ with mock.patch("google.cloud.logging_v2.client.setup_logging") as mocked:
+ client.setup_logging(
+ name=name, resource=resource, labels=labels, stream=stream
+ )
+
+ self.assertEqual(len(mocked.mock_calls), 1)
+ _, args, kwargs = mocked.mock_calls[0]
+
+ (handler,) = args
+ self.assertIsInstance(handler, StructuredLogHandler)
+
+ handler.close = mock.Mock()
+ with contextlib.closing(client):
+ pass
+
+ handler.close.assert_called_once_with()
+
+
+class _Connection(object):
_called_with = None
def __init__(self, *responses):
diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py
index 6f3af684f..382674ebd 100644
--- a/tests/unit/test_entries.py
+++ b/tests/unit/test_entries.py
@@ -79,7 +79,6 @@ def test_w_str(self):
class TestLogEntry(unittest.TestCase):
-
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@@ -201,14 +200,14 @@ def test_from_api_repr_missing_data_no_loggers(self):
def test_from_api_repr_w_loggers_no_logger_match(self):
from datetime import datetime
- from google.cloud._helpers import UTC
+ from datetime import timezone
from google.cloud.logging import Resource
klass = self._get_target_class()
client = _Client(self.PROJECT)
SEVERITY = "CRITICAL"
IID = "IID"
- NOW = datetime.utcnow().replace(tzinfo=UTC)
+ NOW = datetime.now(timezone.utc)
TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME)
LABELS = {"foo": "bar", "baz": "qux"}
@@ -284,11 +283,11 @@ def test_from_api_repr_w_loggers_no_logger_match(self):
def test_from_api_repr_w_loggers_w_logger_match(self):
from datetime import datetime
from datetime import timedelta
- from google.cloud._helpers import UTC
+ from datetime import timezone
client = _Client(self.PROJECT)
IID = "IID"
- NOW = datetime.utcnow().replace(tzinfo=UTC)
+ NOW = datetime.now(timezone.utc)
LATER = NOW + timedelta(seconds=1)
TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
RECEIVED = _datetime_to_rfc3339_w_nanos(LATER)
@@ -342,11 +341,11 @@ def test_from_api_repr_w_loggers_w_logger_match(self):
def test_from_api_repr_w_folder_path(self):
from datetime import datetime
from datetime import timedelta
- from google.cloud._helpers import UTC
+ from datetime import timezone
client = _Client(self.PROJECT)
IID = "IID"
- NOW = datetime.utcnow().replace(tzinfo=UTC)
+ NOW = datetime.now(timezone.utc)
LATER = NOW + timedelta(seconds=1)
TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW)
RECEIVED = _datetime_to_rfc3339_w_nanos(LATER)
@@ -469,7 +468,6 @@ def test_to_api_repr_explicit(self):
class TestTextEntry(unittest.TestCase):
-
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@@ -557,7 +555,6 @@ def test_to_api_repr_explicit(self):
class TestStructEntry(unittest.TestCase):
-
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@@ -659,7 +656,6 @@ def test_to_api_repr_explicit(self):
class TestProtobufEntry(unittest.TestCase):
-
PROJECT = "PROJECT"
LOGGER_NAME = "LOGGER_NAME"
@@ -743,6 +739,45 @@ def test_to_api_repr_proto_defaults(self):
}
self.assertEqual(entry.to_api_repr(), expected)
+ def test_to_api_repr_proto_inner_struct_field(self):
+ from google.protobuf.json_format import MessageToDict
+ from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+ from google.protobuf.struct_pb2 import Struct
+ from google.protobuf.struct_pb2 import Value
+
+ LOG_NAME = "test.log"
+ inner_struct = Struct(fields={"foo": Value(string_value="bar")})
+ message = Struct(fields={"inner": Value(struct_value=inner_struct)})
+
+ entry = self._make_one(log_name=LOG_NAME, payload=message)
+ expected = {
+ "logName": LOG_NAME,
+ "protoPayload": MessageToDict(message),
+ "resource": _GLOBAL_RESOURCE._to_dict(),
+ }
+ self.assertEqual(entry.to_api_repr(), expected)
+
+ def test_to_api_repr_proto_inner_list_field(self):
+ from google.protobuf.json_format import MessageToDict
+ from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+ from google.protobuf.struct_pb2 import ListValue
+ from google.protobuf.struct_pb2 import Struct
+ from google.protobuf.struct_pb2 import Value
+
+ LOG_NAME = "test.log"
+ lines = ListValue(
+ values=[Value(string_value="line1"), Value(string_value="line2")]
+ )
+ message = Struct(fields={"lines": Value(list_value=lines)})
+
+ entry = self._make_one(log_name=LOG_NAME, payload=message)
+ expected = {
+ "logName": LOG_NAME,
+ "protoPayload": MessageToDict(message),
+ "resource": _GLOBAL_RESOURCE._to_dict(),
+ }
+ self.assertEqual(entry.to_api_repr(), expected)
+
def test_to_api_repr_proto_explicit(self):
import datetime
from google.protobuf.json_format import MessageToDict
diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py
index 16c89959b..cdb56747d 100644
--- a/tests/unit/test_logger.py
+++ b/tests/unit/test_logger.py
@@ -28,7 +28,6 @@ def _make_credentials():
class TestLogger(unittest.TestCase):
-
PROJECT = "test-project"
LOGGER_NAME = "logger-name"
TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"'
@@ -1086,7 +1085,6 @@ def test_first_log_emits_instrumentation(self):
class TestBatch(unittest.TestCase):
-
PROJECT = "test-project"
@staticmethod
@@ -1847,7 +1845,6 @@ def test_batch_error_gets_context(self):
class _Logger(object):
-
labels = None
def __init__(self, name="NAME", project="PROJECT"):
@@ -1855,7 +1852,6 @@ def __init__(self, name="NAME", project="PROJECT"):
class _DummyLoggingAPI(object):
-
_write_entries_called_with = None
def write_entries(
@@ -1909,7 +1905,6 @@ class _Bugout(Exception):
class _Connection(object):
-
_called_with = None
def __init__(self, *responses):
diff --git a/tests/unit/test_metric.py b/tests/unit/test_metric.py
index 83b49d02d..f36ae3b2a 100644
--- a/tests/unit/test_metric.py
+++ b/tests/unit/test_metric.py
@@ -16,7 +16,6 @@
class TestMetric(unittest.TestCase):
-
PROJECT = "test-project"
METRIC_NAME = "metric-name"
FULL_METRIC_NAME = f"projects/{PROJECT}/metrics/{METRIC_NAME}"
diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py
new file mode 100644
index 000000000..4369ca2c1
--- /dev/null
+++ b/tests/unit/test_packaging.py
@@ -0,0 +1,56 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+
+def test_namespace_package_compat(tmp_path):
+ # The ``google`` namespace package should not be masked
+ # by the presence of ``google-cloud-logging``.
+
+ google = tmp_path / "google"
+ google.mkdir()
+ google.joinpath("othermod.py").write_text("")
+
+ google_otherpkg = tmp_path / "google" / "otherpkg"
+ google_otherpkg.mkdir()
+ google_otherpkg.joinpath("__init__.py").write_text("")
+
+ # The ``google.cloud`` namespace package should not be masked
+ # by the presence of ``google-cloud-logging``.
+ google_cloud = tmp_path / "google" / "cloud"
+ google_cloud.mkdir()
+ google_cloud.joinpath("othermod.py").write_text("")
+
+ google_cloud_otherpkg = tmp_path / "google" / "cloud" / "otherpkg"
+ google_cloud_otherpkg.mkdir()
+ google_cloud_otherpkg.joinpath("__init__.py").write_text("")
+
+ env = dict(os.environ, PYTHONPATH=str(tmp_path))
+
+ for pkg in [
+ "google.othermod",
+ "google.cloud.othermod",
+ "google.otherpkg",
+ "google.cloud.otherpkg",
+ "google.cloud.logging",
+ ]:
+ cmd = [sys.executable, "-c", f"import {pkg}"]
+ subprocess.check_output(cmd, env=env)
+
+ for module in ["google.othermod", "google.cloud.othermod"]:
+ cmd = [sys.executable, "-m", module]
+ subprocess.check_output(cmd, env=env)
diff --git a/tests/unit/test_sink.py b/tests/unit/test_sink.py
index 1e4852ab5..b5005b057 100644
--- a/tests/unit/test_sink.py
+++ b/tests/unit/test_sink.py
@@ -16,7 +16,6 @@
class TestSink(unittest.TestCase):
-
PROJECT = "test-project"
PROJECT_PATH = f"projects/{PROJECT}"
SINK_NAME = "sink-name"