From b86081ef65663a89abae890ac64e2a94edf92c98 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Thu, 1 Aug 2024 16:04:00 -0400 Subject: [PATCH 1/3] test: Added timestamp to sink names + autodelete sinks older than 2 hours in export_test.py (#925) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Running this to remove all sinks * readd try block * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * added timestamp to sink name * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Fixed regex string * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- samples/snippets/export_test.py | 49 +++++++++++++++++++++++++-------- 1 file changed, 37 insertions(+), 12 deletions(-) diff --git a/samples/snippets/export_test.py b/samples/snippets/export_test.py index c5830e3b2..c21fab2da 100644 --- a/samples/snippets/export_test.py +++ b/samples/snippets/export_test.py @@ -13,8 +13,10 @@ # limitations under the License. import os +import re import random import string +import time import backoff from google.cloud import logging @@ -24,8 +26,13 @@ BUCKET = os.environ["CLOUD_STORAGE_BUCKET"] -TEST_SINK_NAME_TMPL = "example_sink_{}" +TEST_SINK_NAME_TMPL = "example_sink_{}_{}" TEST_SINK_FILTER = "severity>=CRITICAL" +TIMESTAMP = int(time.time()) + +# Threshold beyond which the cleanup_old_sinks fixture will delete +# old sink, in seconds +CLEANUP_THRESHOLD = 7200 # 2 hours def _random_id(): @@ -34,12 +41,36 @@ def _random_id(): ) +def _create_sink_name(): + return TEST_SINK_NAME_TMPL.format(TIMESTAMP, _random_id()) + + +@backoff.on_exception(backoff.expo, Exception, max_time=60, raise_on_giveup=False) +def _delete_sink(sink): + sink.delete() + + +# Runs once for entire test suite +@pytest.fixture(scope="module") +def cleanup_old_sinks(): + client = logging.Client() + test_sink_name_regex = ( + r"^" + TEST_SINK_NAME_TMPL.format(r"(\d+)", r"[A-Z0-9]{6}") + r"$" + ) + for sink in client.list_sinks(): + match = re.match(test_sink_name_regex, sink.name) + if match: + sink_timestamp = int(match.group(1)) + if TIMESTAMP - sink_timestamp > CLEANUP_THRESHOLD: + _delete_sink(sink) + + @pytest.fixture -def example_sink(): +def example_sink(cleanup_old_sinks): client = logging.Client() sink = client.sink( - TEST_SINK_NAME_TMPL.format(_random_id()), + _create_sink_name(), filter_=TEST_SINK_FILTER, destination="storage.googleapis.com/{bucket}".format(bucket=BUCKET), ) @@ -48,10 +79,7 @@ def example_sink(): yield sink - try: - sink.delete() - except Exception: - pass + _delete_sink(sink) def test_list(example_sink, capsys): @@ -65,16 +93,13 @@ def eventually_consistent_test(): def test_create(capsys): - sink_name = TEST_SINK_NAME_TMPL.format(_random_id()) + sink_name = _create_sink_name() try: export.create_sink(sink_name, BUCKET, TEST_SINK_FILTER) # Clean-up the temporary sink. 
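The hunk above names each test sink with a Unix timestamp plus a random six-character suffix, then recovers the timestamp by regex so that sinks older than two hours can be cleaned up. A minimal standalone sketch of that naming-and-staleness check, reusing the template and threshold shown in the test (illustrative only, not part of the patch):

import re
import time

TEST_SINK_NAME_TMPL = "example_sink_{}_{}"   # timestamp, random 6-char suffix
CLEANUP_THRESHOLD = 7200                     # 2 hours, in seconds


def is_stale_sink(sink_name, now=None):
    """Return True if sink_name matches the test pattern and is older than the threshold."""
    now = int(now if now is not None else time.time())
    pattern = r"^" + TEST_SINK_NAME_TMPL.format(r"(\d+)", r"[A-Z0-9]{6}") + r"$"
    match = re.match(pattern, sink_name)
    return bool(match) and now - int(match.group(1)) > CLEANUP_THRESHOLD


# A sink created three hours ago is flagged for deletion; a fresh one is not.
assert is_stale_sink(TEST_SINK_NAME_TMPL.format(int(time.time()) - 3 * 3600, "ABC123"))
assert not is_stale_sink(TEST_SINK_NAME_TMPL.format(int(time.time()), "XYZ789"))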
finally: - try: - logging.Client().sink(sink_name).delete() - except Exception: - pass + _delete_sink(logging.Client().sink(sink_name)) out, _ = capsys.readouterr() assert sink_name in out From 7746e643af29b1008d6e6d6a9958c8337c958dd4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 17:16:42 -0400 Subject: [PATCH 2/3] fix: Allow protobuf 5.x (#888) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.17.1 PiperOrigin-RevId: 629071173 Source-Link: https://github.com/googleapis/googleapis/commit/4afa392105cc62e965631d15b772ff68454ecf1c Source-Link: https://github.com/googleapis/googleapis-gen/commit/16dbbb4d0457db5e61ac9f99b0d52a46154455ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * allow protobuf 5.x * update constraints * See https://github.com/googleapis/gapic-generator-python/issues/2046 * chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 655567917 Source-Link: https://github.com/googleapis/googleapis/commit/43aa65e3897557c11d947f3133ddb76e5c4b2a6c Source-Link: https://github.com/googleapis/googleapis-gen/commit/0e38378753074c0f66ff63348d6864929e104d5c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGUzODM3ODc1MzA3NGMwZjY2ZmY2MzM0OGQ2ODY0OTI5ZTEwNGQ1YyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 656040068 Source-Link: https://github.com/googleapis/googleapis/commit/3f4e29a88f2e1f412439e61c48c88f81dec0bbbf Source-Link: https://github.com/googleapis/googleapis-gen/commit/b8feb2109dde7b0938c22c993d002251ac6714dc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjhmZWIyMTA5ZGRlN2IwOTM4YzIyYzk5M2QwMDIyNTFhYzY3MTRkYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Daniel Sanche --- .../config_service_v2/async_client.py | 585 ++-- .../services/config_service_v2/client.py | 300 +- .../services/config_service_v2/pagers.py | 153 +- .../config_service_v2/transports/base.py | 4 +- .../config_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 271 +- .../logging_service_v2/async_client.py | 177 +- .../services/logging_service_v2/client.py | 79 +- .../services/logging_service_v2/pagers.py | 97 +- .../logging_service_v2/transports/base.py | 4 +- .../logging_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 130 +- .../metrics_service_v2/async_client.py | 147 +- .../services/metrics_service_v2/client.py | 79 +- .../services/metrics_service_v2/pagers.py | 41 +- .../metrics_service_v2/transports/base.py | 4 +- .../metrics_service_v2/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 103 +- pytest.ini | 2 + .../snippet_metadata_google.logging.v2.json | 2 +- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- .../logging_v2/test_config_service_v2.py | 2815 ++++++++++++++++- .../logging_v2/test_logging_service_v2.py | 528 +++- .../logging_v2/test_metrics_service_v2.py | 431 ++- 25 files changed, 5125 insertions(+), 915 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 729a878be..69fa55a48 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -215,7 +217,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -227,9 +233,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. 
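The reworked transport docstring above (and the matching change in client.py further down) means the generated clients now also accept a factory callable in place of a transport instance or name. A hedged usage sketch, assuming the standard generated transport layout; the factory itself is illustrative and not part of this patch:

from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.services.config_service_v2.transports import (
    ConfigServiceV2GrpcTransport,
)


def make_transport(**kwargs):
    # The client calls this with the same keyword arguments it would pass to the
    # transport constructor (credentials, host, client_info, ...), so the factory
    # can adjust or wrap the transport before returning it.
    return ConfigServiceV2GrpcTransport(**kwargs)


client = ConfigServiceV2Client(transport=make_transport)

The async client accepts the same kind of callable, except the factory should return a grpc_asyncio transport such as ConfigServiceV2GrpcAsyncIOTransport.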
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -348,8 +356,8 @@ async def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -357,7 +365,10 @@ async def sample_list_buckets(): "the individual field arguments should be set." ) - request = logging_config.ListBucketsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -366,11 +377,9 @@ async def sample_list_buckets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_buckets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_buckets + ] # Certain fields should be provided within the metadata header; # add these here. @@ -395,6 +404,8 @@ async def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -453,15 +464,16 @@ async def sample_get_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -546,15 +558,16 @@ async def sample_create_bucket_async(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket_async, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. @@ -649,15 +662,16 @@ async def sample_update_bucket_async(): """ # Create or coerce a protobuf request object. 
- request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket_async, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket_async + ] # Certain fields should be provided within the metadata header; # add these here. @@ -742,15 +756,16 @@ async def sample_create_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -830,15 +845,16 @@ async def sample_update_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -908,15 +924,16 @@ async def sample_delete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -980,15 +997,16 @@ async def sample_undelete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- request = logging_config.UndeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.undelete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.undelete_bucket + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1074,8 +1092,8 @@ async def sample_list_views(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1083,7 +1101,10 @@ async def sample_list_views(): "the individual field arguments should be set." ) - request = logging_config.ListViewsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1092,11 +1113,9 @@ async def sample_list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_views, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_views + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1121,6 +1140,8 @@ async def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1179,15 +1200,14 @@ async def sample_get_view(): """ # Create or coerce a protobuf request object. - request = logging_config.GetViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1263,15 +1283,16 @@ async def sample_create_view(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1349,15 +1370,16 @@ async def sample_update_view(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1425,15 +1447,16 @@ async def sample_delete_view(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_view + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1522,8 +1545,8 @@ async def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1531,7 +1554,10 @@ async def sample_list_sinks(): "the individual field arguments should be set." ) - request = logging_config.ListSinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1540,22 +1566,9 @@ async def sample_list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_sinks, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_sinks + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1580,6 +1593,8 @@ async def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1663,8 +1678,8 @@ async def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1672,7 +1687,10 @@ async def sample_get_sink(): "the individual field arguments should be set." ) - request = logging_config.GetSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1681,22 +1699,7 @@ async def sample_get_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_sink, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -1814,8 +1817,8 @@ async def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: raise ValueError( @@ -1823,7 +1826,10 @@ async def sample_create_sink(): "the individual field arguments should be set." ) - request = logging_config.CreateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1834,11 +1840,9 @@ async def sample_create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_sink, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1979,8 +1983,8 @@ async def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1988,7 +1992,10 @@ async def sample_update_sink(): "the individual field arguments should be set." ) - request = logging_config.UpdateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2001,22 +2008,9 @@ async def sample_update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_sink, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2103,8 +2097,8 @@ async def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2112,7 +2106,10 @@ async def sample_delete_sink(): "the individual field arguments should be set." ) - request = logging_config.DeleteSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2121,22 +2118,9 @@ async def sample_delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_sink, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_sink + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2250,8 +2234,8 @@ async def sample_create_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2259,7 +2243,10 @@ async def sample_create_link(): "the individual field arguments should be set." ) - request = logging_config.CreateLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2272,11 +2259,9 @@ async def sample_create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2385,8 +2370,8 @@ async def sample_delete_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2394,7 +2379,10 @@ async def sample_delete_link(): "the individual field arguments should be set." ) - request = logging_config.DeleteLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2403,11 +2391,9 @@ async def sample_delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_link + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2506,8 +2492,8 @@ async def sample_list_links(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2515,7 +2501,10 @@ async def sample_list_links(): "the individual field arguments should be set." ) - request = logging_config.ListLinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2524,11 +2513,9 @@ async def sample_list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_links, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_links + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2553,6 +2540,8 @@ async def sample_list_links(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2623,8 +2612,8 @@ async def sample_get_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2632,7 +2621,10 @@ async def sample_get_link(): "the individual field arguments should be set." ) - request = logging_config.GetLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2641,11 +2633,7 @@ async def sample_get_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2738,8 +2726,8 @@ async def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2747,7 +2735,10 @@ async def sample_list_exclusions(): "the individual field arguments should be set." 
) - request = logging_config.ListExclusionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2756,22 +2747,9 @@ async def sample_list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_exclusions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2796,6 +2774,8 @@ async def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2877,8 +2857,8 @@ async def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2886,7 +2866,10 @@ async def sample_get_exclusion(): "the individual field arguments should be set." ) - request = logging_config.GetExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2895,22 +2878,9 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_exclusion, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3025,8 +2995,8 @@ async def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: raise ValueError( @@ -3034,7 +3004,10 @@ async def sample_create_exclusion(): "the individual field arguments should be set." ) - request = logging_config.CreateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3045,11 +3018,9 @@ async def sample_create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3176,8 +3147,8 @@ async def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3185,7 +3156,10 @@ async def sample_update_exclusion(): "the individual field arguments should be set." ) - request = logging_config.UpdateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3198,11 +3172,9 @@ async def sample_update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3286,8 +3258,8 @@ async def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3295,7 +3267,10 @@ async def sample_delete_exclusion(): "the individual field arguments should be set." ) - request = logging_config.DeleteExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3304,22 +3279,9 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_exclusion + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3414,15 +3376,16 @@ async def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3525,15 +3488,16 @@ async def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_cmek_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3648,8 +3612,8 @@ async def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3657,7 +3621,10 @@ async def sample_get_settings(): "the individual field arguments should be set." ) - request = logging_config.GetSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3666,11 +3633,9 @@ async def sample_get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3793,8 +3758,8 @@ async def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3802,7 +3767,10 @@ async def sample_update_settings(): "the individual field arguments should be set." ) - request = logging_config.UpdateSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3813,11 +3781,9 @@ async def sample_update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_settings + ] # Certain fields should be provided within the metadata header; # add these here. @@ -3901,15 +3867,16 @@ async def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - request = logging_config.CopyLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.copy_log_entries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.copy_log_entries + ] # Validate the universe domain. 
self._client._validate_universe_domain() diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index 5257f8ddf..8c210c0cd 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -644,7 +645,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -656,9 +661,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -767,8 +774,15 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ConfigServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -855,8 +869,8 @@ def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -864,10 +878,8 @@ def sample_list_buckets(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListBucketsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.ListBucketsRequest): request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -902,6 +914,8 @@ def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -960,10 +974,8 @@ def sample_get_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetBucketRequest): request = logging_config.GetBucketRequest(request) @@ -1054,10 +1066,8 @@ def sample_create_bucket_async(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) @@ -1158,10 +1168,8 @@ def sample_update_bucket_async(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) @@ -1252,10 +1260,8 @@ def sample_create_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) @@ -1341,10 +1347,8 @@ def sample_update_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) @@ -1420,10 +1424,8 @@ def sample_delete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteBucketRequest): request = logging_config.DeleteBucketRequest(request) @@ -1493,10 +1495,8 @@ def sample_undelete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UndeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UndeleteBucketRequest): request = logging_config.UndeleteBucketRequest(request) @@ -1588,8 +1588,8 @@ def sample_list_views(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1597,10 +1597,8 @@ def sample_list_views(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListViewsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListViewsRequest): request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1635,6 +1633,8 @@ def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1693,10 +1693,8 @@ def sample_get_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetViewRequest): request = logging_config.GetViewRequest(request) @@ -1778,10 +1776,8 @@ def sample_create_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateViewRequest): request = logging_config.CreateViewRequest(request) @@ -1865,10 +1861,8 @@ def sample_update_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateViewRequest): request = logging_config.UpdateViewRequest(request) @@ -1942,10 +1936,8 @@ def sample_delete_view(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteViewRequest): request = logging_config.DeleteViewRequest(request) @@ -2040,8 +2032,8 @@ def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2049,10 +2041,8 @@ def sample_list_sinks(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListSinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListSinksRequest): request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2087,6 +2077,8 @@ def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2170,8 +2162,8 @@ def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2179,10 +2171,8 @@ def sample_get_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSinkRequest): request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2310,8 +2300,8 @@ def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: raise ValueError( @@ -2319,10 +2309,8 @@ def sample_create_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateSinkRequest): request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2475,8 +2463,8 @@ def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2484,10 +2472,8 @@ def sample_update_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateSinkRequest): request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2588,8 +2574,8 @@ def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError( @@ -2597,10 +2583,8 @@ def sample_delete_sink(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteSinkRequest): request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2724,8 +2708,8 @@ def sample_create_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError( @@ -2733,10 +2717,8 @@ def sample_create_link(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateLinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateLinkRequest): request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2859,8 +2841,8 @@ def sample_delete_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -2868,10 +2850,8 @@ def sample_delete_link(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteLinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteLinkRequest): request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2980,8 +2960,8 @@ def sample_list_links(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -2989,10 +2969,8 @@ def sample_list_links(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListLinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListLinksRequest): request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3027,6 +3005,8 @@ def sample_list_links(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3097,8 +3077,8 @@ def sample_get_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3106,10 +3086,8 @@ def sample_get_link(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetLinkRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetLinkRequest): request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3212,8 +3190,8 @@ def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -3221,10 +3199,8 @@ def sample_list_exclusions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListExclusionsRequest): request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3259,6 +3235,8 @@ def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3340,8 +3318,8 @@ def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3349,10 +3327,8 @@ def sample_get_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetExclusionRequest): request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3477,8 +3453,8 @@ def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: raise ValueError( @@ -3486,10 +3462,8 @@ def sample_create_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateExclusionRequest): request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3628,8 +3602,8 @@ def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -3637,10 +3611,8 @@ def sample_update_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateExclusionRequest): request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3738,8 +3710,8 @@ def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -3747,10 +3719,8 @@ def sample_delete_exclusion(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteExclusionRequest): request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3855,10 +3825,8 @@ def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetCmekSettingsRequest): request = logging_config.GetCmekSettingsRequest(request) @@ -3967,10 +3935,8 @@ def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateCmekSettingsRequest): request = logging_config.UpdateCmekSettingsRequest(request) @@ -4091,8 +4057,8 @@ def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -4100,10 +4066,8 @@ def sample_get_settings(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSettingsRequest): request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4236,8 +4200,8 @@ def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([settings, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -4245,10 +4209,8 @@ def sample_update_settings(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateSettingsRequest): request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4344,10 +4306,8 @@ def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CopyLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CopyLogEntriesRequest): request = logging_config.CopyLogEntriesRequest(request) diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py index 8a9710005..1a1ba7c25 100644 --- a/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
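[Editor's note: illustrative sketch, not part of the patch; the diff resumes below.] The client.py hunks above allow ``transport`` to be a Callable and forward the per-call ``retry`` and ``timeout`` into the returned pagers. A minimal sketch of how that surface might be used, assuming application-default credentials, the generated ``ConfigServiceV2GrpcTransport`` class name, and a placeholder project name:

from google.api_core import retry as retries
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.services.config_service_v2.transports.grpc import (
    ConfigServiceV2GrpcTransport,
)

# A Callable is now accepted wherever a transport instance or name was;
# it is invoked with the same keyword arguments as the transport constructor.
client = ConfigServiceV2Client(
    transport=lambda **kwargs: ConfigServiceV2GrpcTransport(**kwargs)
)

# retry/timeout are stored on the pager and re-applied to every follow-up
# page request, not only to the first RPC.
pager = client.list_sinks(
    parent="projects/my-project",  # placeholder parent resource
    retry=retries.Retry(initial=0.1, maximum=10.0, multiplier=1.3),
    timeout=30.0,
)
for sink in pager:
    print(sink.name)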
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.logging_v2.types import logging_config @@ -51,6 +64,8 @@ def __init__( request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogBucket]: @@ -113,6 +138,8 @@ def __init__( request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: @@ -179,6 +216,8 @@ def __init__( request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -190,12 +229,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -206,7 +250,12 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogView]: @@ -241,6 +290,8 @@ def __init__( request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -252,12 +303,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -268,7 +324,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogView]: @@ -307,6 +368,8 @@ def __init__( request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -318,12 +381,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -334,7 +402,12 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogSink]: @@ -369,6 +442,8 @@ def __init__( request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -380,12 +455,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -396,7 +476,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: @@ -435,6 +520,8 @@ def __init__( request: logging_config.ListLinksRequest, response: logging_config.ListLinksResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -446,12 +533,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLinksResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListLinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -462,7 +554,12 @@ def pages(self) -> Iterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.Link]: @@ -497,6 +594,8 @@ def __init__( request: logging_config.ListLinksRequest, response: logging_config.ListLinksResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -508,12 +607,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLinksResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListLinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -524,7 +628,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.Link]: @@ -563,6 +672,8 @@ def __init__( request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -574,12 +685,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -590,7 +706,12 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_config.LogExclusion]: @@ -625,6 +746,8 @@ def __init__( request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -636,12 +759,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -652,7 +780,12 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e9b3dae14..ac03c526d 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index ccb53fe66..0764afcfd 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -53,7 +53,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -73,14 +73,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -90,11 +93,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -121,9 +124,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -162,7 +166,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 41894f1eb..e4a8d16f9 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -68,7 +70,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
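[Editor's note: illustrative sketch, not part of the patch; the diff resumes below.] With the grpc.py changes above, ``channel`` may also be a factory callable rather than a ``grpc.Channel``; only a concrete Channel instance triggers the new ignore-credentials branch. A small sketch under that assumption, using the generated ``ConfigServiceV2GrpcTransport`` class name:

from google.cloud.logging_v2.services.config_service_v2.transports.grpc import (
    ConfigServiceV2GrpcTransport,
)

def channel_factory(host, **kwargs):
    # Called by the transport with the same arguments it would pass to
    # ``create_channel``; simply delegate to the default factory here.
    return ConfigServiceV2GrpcTransport.create_channel(host, **kwargs)

# Passing a Callable does not suppress credential resolution, because the
# ``isinstance(channel, grpc.Channel)`` check added above only matches
# channel instances.
transport = ConfigServiceV2GrpcTransport(channel=channel_factory)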
@@ -98,7 +99,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -118,15 +119,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -136,11 +140,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -167,9 +171,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -207,7 +212,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -1236,6 +1243,248 @@ def copy_log_entries( ) return self._stubs["copy_log_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_buckets: gapic_v1.method_async.wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: gapic_v1.method_async.wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket_async: gapic_v1.method_async.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method_async.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: gapic_v1.method_async.wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: gapic_v1.method_async.wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: gapic_v1.method_async.wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method_async.wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.list_views: gapic_v1.method_async.wrap_method( + self.list_views, + default_timeout=None, + client_info=client_info, + ), + self.get_view: gapic_v1.method_async.wrap_method( + self.get_view, + default_timeout=None, + client_info=client_info, + ), + self.create_view: gapic_v1.method_async.wrap_method( + self.create_view, + default_timeout=None, + client_info=client_info, + ), + self.update_view: gapic_v1.method_async.wrap_method( + self.update_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_view: gapic_v1.method_async.wrap_method( + self.delete_view, + default_timeout=None, + client_info=client_info, + ), + self.list_sinks: gapic_v1.method_async.wrap_method( + self.list_sinks, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: gapic_v1.method_async.wrap_method( + self.get_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: gapic_v1.method_async.wrap_method( + self.create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self.update_sink: gapic_v1.method_async.wrap_method( + self.update_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: gapic_v1.method_async.wrap_method( + self.delete_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_link: gapic_v1.method_async.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method_async.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method_async.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: gapic_v1.method_async.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), + self.list_exclusions: gapic_v1.method_async.wrap_method( + self.list_exclusions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: gapic_v1.method_async.wrap_method( + self.get_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: gapic_v1.method_async.wrap_method( + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.update_exclusion: gapic_v1.method_async.wrap_method( + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.delete_exclusion: gapic_v1.method_async.wrap_method( + self.delete_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: gapic_v1.method_async.wrap_method( + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_cmek_settings: gapic_v1.method_async.wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.get_settings: gapic_v1.method_async.wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_settings: gapic_v1.method_async.wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method_async.wrap_method( + self.copy_log_entries, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 890361b49..27e8ca226 100644 --- 
a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -40,6 +41,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -199,7 +201,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -211,9 +217,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -323,8 +331,8 @@ async def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: raise ValueError( @@ -332,7 +340,10 @@ async def sample_delete_log(): "the individual field arguments should be set." ) - request = logging.DeleteLogRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -341,22 +352,9 @@ async def sample_delete_log(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log + ] # Certain fields should be provided within the metadata header; # add these here. 
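[Editor's note: illustrative sketch, not part of the patch; the diff resumes below.] The async_client.py hunks above likewise accept a transport Callable and route ``delete_log`` through the transport's precomputed ``_wrapped_methods`` (populated by the ``_prep_wrapped_messages`` override added earlier) instead of wrapping the RPC inline. A sketch assuming application-default credentials, the generated ``LoggingServiceV2GrpcAsyncIOTransport`` class name, and placeholder resource names:

import asyncio

from google.cloud.logging_v2.services.logging_service_v2 import (
    LoggingServiceV2AsyncClient,
)
from google.cloud.logging_v2.services.logging_service_v2.transports.grpc_asyncio import (
    LoggingServiceV2GrpcAsyncIOTransport,
)

def transport_factory(**kwargs):
    # Receives the same initialization arguments the client would otherwise
    # pass to the transport constructor (credentials, host, client_info, ...).
    return LoggingServiceV2GrpcAsyncIOTransport(**kwargs)

async def main():
    client = LoggingServiceV2AsyncClient(transport=transport_factory)
    # delete_log now picks up its default retry/timeout from the wrapped
    # method table on the transport rather than from an inline wrapper.
    await client.delete_log(log_name="projects/my-project/logs/my-log")

asyncio.run(main())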
@@ -524,8 +522,8 @@ async def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: raise ValueError( @@ -533,7 +531,10 @@ async def sample_write_log_entries(): "the individual field arguments should be set." ) - request = logging.WriteLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -549,22 +550,9 @@ async def sample_write_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write_log_entries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.write_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -688,8 +676,8 @@ async def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: raise ValueError( @@ -697,7 +685,10 @@ async def sample_list_log_entries(): "the individual field arguments should be set." ) - request = logging.ListLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -710,22 +701,9 @@ async def sample_list_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_entries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_entries + ] # Validate the universe domain. 
self._client._validate_universe_domain() @@ -744,6 +722,8 @@ async def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -809,26 +789,16 @@ async def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_monitored_resource_descriptors + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -847,6 +817,8 @@ async def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -923,8 +895,8 @@ async def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -932,7 +904,10 @@ async def sample_list_logs(): "the individual field arguments should be set." ) - request = logging.ListLogsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -941,22 +916,9 @@ async def sample_list_logs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_logs, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_logs + ] # Certain fields should be provided within the metadata header; # add these here. @@ -981,6 +943,8 @@ async def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1052,22 +1016,9 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.tail_log_entries, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.tail_log_entries + ] # Validate the universe domain. self._client._validate_universe_domain() diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index a9d6e082b..2c86aecca 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -526,7 +527,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -538,9 +543,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -649,8 +656,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[LoggingServiceV2Transport], + Callable[..., LoggingServiceV2Transport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LoggingServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -728,8 +743,8 @@ def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: raise ValueError( @@ -737,10 +752,8 @@ def sample_delete_log(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a logging.DeleteLogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the @@ -918,8 +931,8 @@ def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: raise ValueError( @@ -927,10 +940,8 @@ def sample_write_log_entries(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.WriteLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1070,8 +1081,8 @@ def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: raise ValueError( @@ -1079,10 +1090,8 @@ def sample_list_log_entries(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1115,6 +1124,8 @@ def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1180,10 +1191,8 @@ def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListMonitoredResourceDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -1210,6 +1219,8 @@ def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1286,8 +1297,8 @@ def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -1295,10 +1306,8 @@ def sample_list_logs(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1333,6 +1342,8 @@ def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py index f8a63387b..0eece8acc 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging @@ -53,6 +66,8 @@ def __init__( request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -64,12 +79,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -80,7 +100,12 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[log_entry.LogEntry]: @@ -115,6 +140,8 @@ def __init__( request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -126,12 +153,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -142,7 +174,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: @@ -181,6 +218,8 @@ def __init__( request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -192,12 +231,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -208,7 +252,12 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: @@ -245,6 +294,8 @@ def __init__( request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -256,12 +307,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -274,7 +330,12 @@ async def pages( yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__( @@ -315,6 +376,8 @@ def __init__( request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -326,12 +389,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -342,7 +410,12 @@ def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -377,6 +450,8 @@ def __init__( request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -388,12 +463,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -404,7 +484,12 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[str]: diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 5bbd7cc78..6f7e1c99d 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 8a6a3efd3..bce7e8ffc 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -52,7 +52,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -72,14 +72,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -89,11 +92,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -119,9 +122,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -160,7 +164,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 159a0e2e4..f03c1fad7 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -67,7 +69,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -97,7 +98,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -117,15 +118,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -135,11 +139,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -165,9 +169,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -205,7 +210,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -420,6 +427,107 @@ def tail_log_entries( ) return self._stubs["tail_log_entries"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.delete_log: gapic_v1.method_async.wrap_method( + self.delete_log, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: gapic_v1.method_async.wrap_method( + self.write_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: gapic_v1.method_async.wrap_method( + self.list_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: gapic_v1.method_async.wrap_method( + 
self.list_monitored_resource_descriptors, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: gapic_v1.method_async.wrap_method( + self.list_logs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.tail_log_entries: gapic_v1.method_async.wrap_method( + self.tail_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 1053158e9..19513e126 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -197,7 +199,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -209,9 +215,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -323,8 +331,8 @@ async def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -332,7 +340,10 @@ async def sample_list_log_metrics(): "the individual field arguments should be set." ) - request = logging_metrics.ListLogMetricsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -341,22 +352,9 @@ async def sample_list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_metrics, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_log_metrics + ] # Certain fields should be provided within the metadata header; # add these here. @@ -381,6 +379,8 @@ async def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -460,8 +460,8 @@ async def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -469,7 +469,10 @@ async def sample_get_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.GetLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -478,22 +481,9 @@ async def sample_get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_log_metric, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -607,8 +597,8 @@ async def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -616,7 +606,10 @@ async def sample_create_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.CreateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -627,11 +620,9 @@ async def sample_create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_log_metric, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -742,8 +733,8 @@ async def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -751,7 +742,10 @@ async def sample_update_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.UpdateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -762,22 +756,9 @@ async def sample_update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_log_metric, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. @@ -855,8 +836,8 @@ async def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -864,7 +845,10 @@ async def sample_delete_log_metric(): "the individual field arguments should be set." ) - request = logging_metrics.DeleteLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -873,22 +857,9 @@ async def sample_delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log_metric, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_log_metric + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9309f5c17..5f577decf 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -525,7 +526,11 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport]] = None, + transport: Optional[ + Union[ + str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport] + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -537,9 +542,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
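As the docstring above notes, transport may now also be a callable that constructs the transport; the following hunk shows how the client resolves a string, a transport instance, or such a factory into transport_init. A minimal usage sketch, assuming only the standard generated export names (MetricsServiceV2Client, MetricsServiceV2GrpcTransport); AnonymousCredentials is used purely to keep the example self-contained:

    from google.auth.credentials import AnonymousCredentials

    from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
    from google.cloud.logging_v2.services.metrics_service_v2.transports import (
        MetricsServiceV2GrpcTransport,
    )

    # The factory receives the same keyword arguments the client would pass to
    # the transport constructor (credentials, host, scopes, client_info, ...),
    # so it can inspect or adjust them before delegating.
    def make_transport(**kwargs):
        return MetricsServiceV2GrpcTransport(**kwargs)

    client = MetricsServiceV2Client(
        credentials=AnonymousCredentials(),
        transport=make_transport,
    )
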
@@ -648,8 +655,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[MetricsServiceV2Transport], + Callable[..., MetricsServiceV2Transport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetricsServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -729,8 +744,8 @@ def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -738,10 +753,8 @@ def sample_list_log_metrics(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.ListLogMetricsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -776,6 +789,8 @@ def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -855,8 +870,8 @@ def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -864,10 +879,8 @@ def sample_get_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.GetLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -991,8 +1004,8 @@ def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -1000,10 +1013,8 @@ def sample_create_log_metric(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.CreateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1126,8 +1137,8 @@ def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: raise ValueError( @@ -1135,10 +1146,8 @@ def sample_update_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.UpdateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1228,8 +1237,8 @@ def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1237,10 +1246,8 @@ def sample_delete_log_metric(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.DeleteLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 70bad4bea..6975ae0d9 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.logging_v2.types import logging_metrics @@ -51,6 +64,8 @@ def __init__( request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[logging_metrics.LogMetric]: @@ -113,6 +138,8 @@ def __init__( request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index f63d896b2..aeb86e1e9 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -90,6 +90,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -102,7 +104,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 3c4a2f38f..1b16e9701 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -52,7 +52,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -72,14 +72,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -89,11 +92,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -119,9 +122,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -160,7 +164,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 33f85cc96..81c675d85 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -67,7 +69,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
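Taken together, the transport hunks above let ``channel`` be either a ready-made gRPC channel, in which case credentials are ignored and ``_ignore_credentials`` is set on the base transport, or a callable that the transport invokes with the same arguments it would pass to ``create_channel``. A minimal sketch of the callable form for the sync transport, assuming Application Default Credentials are available; the factory name is illustrative:

from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client
from google.cloud.logging_v2.services.metrics_service_v2.transports import (
    MetricsServiceV2GrpcTransport,
)

def channel_factory(host, **kwargs):
    # Invoked by the transport with the same arguments it would pass to
    # create_channel, so channel construction can be observed or customized here.
    print(f"opening gRPC channel to {host}")
    return MetricsServiceV2GrpcTransport.create_channel(host, **kwargs)

transport = MetricsServiceV2GrpcTransport(channel=channel_factory)
client = MetricsServiceV2Client(transport=transport)

# Alternatively, pass a channel instance, e.g. grpc.insecure_channel(...) for a
# local emulator; the transport then skips credential resolution entirely.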
@@ -97,7 +98,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -117,15 +118,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -135,11 +139,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -165,9 +169,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -205,7 +210,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -373,6 +380,80 @@ def delete_log_metric( ) return self._stubs["delete_log_metric"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_log_metrics: gapic_v1.method_async.wrap_method( + self.list_log_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_log_metric: gapic_v1.method_async.wrap_method( + self.get_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_log_metric: gapic_v1.method_async.wrap_method( + self.create_log_metric, + default_timeout=60.0, + client_info=client_info, + ), + self.update_log_metric: gapic_v1.method_async.wrap_method( + self.update_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_log_metric: gapic_v1.method_async.wrap_method( + self.delete_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/pytest.ini b/pytest.ini index 5cad3409b..2d8ce14b8 100644 --- a/pytest.ini +++ b/pytest.ini @@ -25,3 +25,5 @@ filterwarnings = ignore:'pkgutil.get_loader' is deprecated and slated for removal in Python 3.14; use importlib.util.find_spec\(\) instead:DeprecationWarning # Remove warning once https://github.com/protocolbuffers/protobuf/issues/17345 is fixed ignore:.*Please use message_factory.GetMessageClass\(\) instead. 
SymbolDatabase.GetPrototype\(\) will be removed soon.:UserWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/2046 is fixed + ignore:coroutine 'AsyncMockMixin._execute_mock_call' was never awaited:RuntimeWarning diff --git a/samples/generated_samples/snippet_metadata_google.logging.v2.json b/samples/generated_samples/snippet_metadata_google.logging.v2.json index 9d5a375e9..b62675ba6 100644 --- a/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/setup.py b/setup.py index 5414f1b08..1e214751d 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,7 @@ "opentelemetry-api >= 1.0.0", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 3aded209e..fa18c36c0 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -7,7 +7,7 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.0 -protobuf==3.19.5 +protobuf==3.20.2 google-cloud-core==2.0.0 # Lower bound testing for optional dependencies diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index b1c25ba9e..72e028529 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -40,6 +40,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( @@ -1169,6 +1170,9 @@ def test_list_buckets_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1193,6 +1197,9 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_buckets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1202,6 +1209,41 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): ) +def test_list_buckets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_buckets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc + request = {} + client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_buckets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1225,6 +1267,47 @@ async def test_list_buckets_empty_call_async(): assert args[0] == logging_config.ListBucketsRequest() +@pytest.mark.asyncio +async def test_list_buckets_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_buckets + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_buckets + ] = mock_object + + request = {} + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest @@ -1444,13 +1527,17 @@ def test_list_buckets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_buckets(request={}) + pager = client.list_buckets(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1655,6 +1742,9 @@ def test_get_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1678,6 +1768,9 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1686,6 +1779,41 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): ) +def test_get_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc + request = {} + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1715,6 +1843,45 @@ async def test_get_bucket_empty_call_async(): assert args[0] == logging_config.GetBucketRequest() +@pytest.mark.asyncio +async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_bucket + ] = mock_object + + request = {} + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest @@ -1874,6 +2041,9 @@ def test_create_bucket_async_empty_call(): with mock.patch.object( type(client.transport.create_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1900,6 +2070,9 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1909,6 +2082,49 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): ) +def test_create_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_bucket_async in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_bucket_async + ] = mock_rpc + request = {} + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1932,6 +2148,51 @@ async def test_create_bucket_async_empty_call_async(): assert args[0] == logging_config.CreateBucketRequest() +@pytest.mark.asyncio +async def test_create_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_bucket_async + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket_async + ] = mock_object + + request = {} + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -2082,6 +2343,9 @@ def test_update_bucket_async_empty_call(): with mock.patch.object( type(client.transport.update_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2107,6 +2371,9 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_bucket_async), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2115,6 +2382,49 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): ) +def test_update_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_bucket_async in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_bucket_async + ] = mock_rpc + request = {} + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2138,6 +2448,51 @@ async def test_update_bucket_async_empty_call_async(): assert args[0] == logging_config.UpdateBucketRequest() +@pytest.mark.asyncio +async def test_update_bucket_async_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_bucket_async + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket_async + ] = mock_object + + request = {} + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_async_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest @@ -2299,6 +2654,9 @@ def test_create_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2323,6 +2681,9 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2332,6 +2693,41 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): ) +def test_create_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc + request = {} + client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2361,6 +2757,47 @@ async def test_create_bucket_empty_call_async(): assert args[0] == logging_config.CreateBucketRequest() +@pytest.mark.asyncio +async def test_create_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_bucket + ] = mock_object + + request = {} + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest @@ -2531,6 +2968,9 @@ def test_update_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2554,6 +2994,9 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2562,6 +3005,41 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): ) +def test_update_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc + request = {} + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2591,6 +3069,47 @@ async def test_update_bucket_empty_call_async(): assert args[0] == logging_config.UpdateBucketRequest() +@pytest.mark.asyncio +async def test_update_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_bucket + ] = mock_object + + request = {} + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest @@ -2746,6 +3265,9 @@ def test_delete_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2769,6 +3291,9 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2777,6 +3302,41 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): ) +def test_delete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc + request = {} + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2796,6 +3356,47 @@ async def test_delete_bucket_empty_call_async(): assert args[0] == logging_config.DeleteBucketRequest() +@pytest.mark.asyncio +async def test_delete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_bucket + ] = mock_object + + request = {} + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest @@ -2932,6 +3533,9 @@ def test_undelete_bucket_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2955,6 +3559,9 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.undelete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2963,6 +3570,41 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): ) +def test_undelete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc + request = {} + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_undelete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2982,6 +3624,47 @@ async def test_undelete_bucket_empty_call_async(): assert args[0] == logging_config.UndeleteBucketRequest() +@pytest.mark.asyncio +async def test_undelete_bucket_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.undelete_bucket + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.undelete_bucket + ] = mock_object + + request = {} + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest @@ -3121,6 +3804,9 @@ def test_list_views_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3145,6 +3831,9 @@ def test_list_views_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_views(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3154,6 +3843,41 @@ def test_list_views_non_empty_request_with_auto_populated_field(): ) +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_views_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3177,6 +3901,45 @@ async def test_list_views_empty_call_async(): assert args[0] == logging_config.ListViewsRequest() +@pytest.mark.asyncio +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_views + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_views + ] = mock_object + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest @@ -3396,13 +4159,17 @@ def test_list_views_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_views(request={}) + pager = client.list_views(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3599,6 +4366,9 @@ def test_get_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3622,6 +4392,9 @@ def test_get_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3630,6 +4403,41 @@ def test_get_view_non_empty_request_with_auto_populated_field(): ) +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3655,6 +4463,45 @@ async def test_get_view_empty_call_async(): assert args[0] == logging_config.GetViewRequest() +@pytest.mark.asyncio +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_view + ] = mock_object + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest @@ -3809,6 +4656,9 @@ def test_create_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3833,6 +4683,9 @@ def test_create_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3842,6 +4695,41 @@ def test_create_view_non_empty_request_with_auto_populated_field(): ) +def test_create_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + request = {} + client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3867,6 +4755,47 @@ async def test_create_view_empty_call_async(): assert args[0] == logging_config.CreateViewRequest() +@pytest.mark.asyncio +async def test_create_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_view + ] = mock_object + + request = {} + await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest @@ -4021,6 +4950,9 @@ def test_update_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4044,6 +4976,9 @@ def test_update_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4052,6 +4987,41 @@ def test_update_view_non_empty_request_with_auto_populated_field(): ) +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + request = {} + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4077,6 +5047,47 @@ async def test_update_view_empty_call_async(): assert args[0] == logging_config.UpdateViewRequest() +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_view + ] = mock_object + + request = {} + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest @@ -4224,6 +5235,9 @@ def test_delete_view_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4247,6 +5261,9 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4255,23 +5272,99 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): ) -@pytest.mark.asyncio -async def test_delete_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_view), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_view + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_view + ] = mock_object + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_view(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_view), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio @@ -4413,6 +5506,9 @@ def test_list_sinks_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4437,6 +5533,9 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_sinks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4446,6 +5545,41 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): ) +def test_list_sinks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sinks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc + request = {} + client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_sinks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4469,6 +5603,45 @@ async def test_list_sinks_empty_call_async(): assert args[0] == logging_config.ListSinksRequest() +@pytest.mark.asyncio +async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_sinks + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_sinks + ] = mock_object + + request = {} + await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest @@ -4688,13 +5861,17 @@ def test_list_sinks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_sinks(request={}) + pager = client.list_sinks(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4901,6 +6078,9 @@ def test_get_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4924,6 +6104,9 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4932,6 +6115,41 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): ) +def test_get_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc + request = {} + client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4962,6 +6180,45 @@ async def test_get_sink_empty_call_async(): assert args[0] == logging_config.GetSinkRequest() +@pytest.mark.asyncio +async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_sink + ] = mock_object + + request = {} + await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest @@ -5218,6 +6475,9 @@ def test_create_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5241,6 +6501,9 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5249,6 +6512,41 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): ) +def test_create_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc + request = {} + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5279,6 +6577,47 @@ async def test_create_sink_empty_call_async(): assert args[0] == logging_config.CreateSinkRequest() +@pytest.mark.asyncio +async def test_create_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_sink + ] = mock_object + + request = {} + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest @@ -5545,6 +6884,9 @@ def test_update_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5568,6 +6910,9 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5576,6 +6921,41 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): ) +def test_update_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc + request = {} + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5606,6 +6986,47 @@ async def test_update_sink_empty_call_async(): assert args[0] == logging_config.UpdateSinkRequest() +@pytest.mark.asyncio +async def test_update_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_sink + ] = mock_object + + request = {} + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest @@ -5865,6 +7286,9 @@ def test_delete_sink_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5888,6 +7312,9 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5896,6 +7323,41 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): ) +def test_delete_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc + request = {} + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5915,6 +7377,47 @@ async def test_delete_sink_empty_call_async(): assert args[0] == logging_config.DeleteSinkRequest() +@pytest.mark.asyncio +async def test_delete_sink_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_sink + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_sink + ] = mock_object + + request = {} + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest @@ -6131,6 +7634,9 @@ def test_create_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6155,6 +7661,9 @@ def test_create_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6164,25 +7673,109 @@ def test_create_link_non_empty_request_with_auto_populated_field(): ) -@pytest.mark.asyncio -async def test_create_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) +def test_create_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_link] = mock_rpc + request = {} + client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_link), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest() + + +@pytest.mark.asyncio +async def test_create_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_link + ] = mock_object + + request = {} + await client.create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_link(request) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_link), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - response = await client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio @@ -6427,6 +8020,9 @@ def test_delete_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6450,6 +8046,9 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6458,6 +8057,45 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): ) +def test_delete_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc + request = {} + client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6479,6 +8117,51 @@ async def test_delete_link_empty_call_async(): assert args[0] == logging_config.DeleteLinkRequest() +@pytest.mark.asyncio +async def test_delete_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_link + ] = mock_object + + request = {} + await client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_link_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteLinkRequest @@ -6704,6 +8387,9 @@ def test_list_links_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6728,6 +8414,9 @@ def test_list_links_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_links), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6737,6 +8426,41 @@ def test_list_links_non_empty_request_with_auto_populated_field(): ) +def test_list_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_links in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_links] = mock_rpc + request = {} + client.list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6760,6 +8484,45 @@ async def test_list_links_empty_call_async(): assert args[0] == logging_config.ListLinksRequest() +@pytest.mark.asyncio +async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_links + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_links + ] = mock_object + + request = {} + await client.list_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_links_async( transport: str = "grpc_asyncio", request_type=logging_config.ListLinksRequest @@ -6979,13 +8742,17 @@ def test_list_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_links(request={}) + pager = client.list_links(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -7182,6 +8949,9 @@ def test_get_link_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7205,6 +8975,9 @@ def test_get_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7213,6 +8986,41 @@ def test_get_link_non_empty_request_with_auto_populated_field(): ) +def test_get_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_link] = mock_rpc + request = {} + client.get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7238,6 +9046,45 @@ async def test_get_link_empty_call_async(): assert args[0] == logging_config.GetLinkRequest() +@pytest.mark.asyncio +async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_link + ] = mock_object + + request = {} + await client.get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_link_async( transport: str = "grpc_asyncio", request_type=logging_config.GetLinkRequest @@ -7466,6 +9313,9 @@ def test_list_exclusions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7490,6 +9340,9 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_exclusions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7499,6 +9352,41 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): ) +def test_list_exclusions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_exclusions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc + request = {} + client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_exclusions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7522,6 +9410,47 @@ async def test_list_exclusions_empty_call_async(): assert args[0] == logging_config.ListExclusionsRequest() +@pytest.mark.asyncio +async def test_list_exclusions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_exclusions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_exclusions + ] = mock_object + + request = {} + await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest @@ -7741,13 +9670,17 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_exclusions(request={}) + pager = client.list_exclusions(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -7946,6 +9879,9 @@ def test_get_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7969,6 +9905,9 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7977,6 +9916,41 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): ) +def test_get_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc + request = {} + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8003,6 +9977,47 @@ async def test_get_exclusion_empty_call_async(): assert args[0] == logging_config.GetExclusionRequest() +@pytest.mark.asyncio +async def test_get_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_exclusion + ] = mock_object + + request = {} + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest @@ -8243,6 +10258,9 @@ def test_create_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8266,6 +10284,9 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8274,6 +10295,43 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): ) +def test_create_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_exclusion + ] = mock_rpc + request = {} + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8300,6 +10358,47 @@ async def test_create_exclusion_empty_call_async(): assert args[0] == logging_config.CreateExclusionRequest() +@pytest.mark.asyncio +async def test_create_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_exclusion + ] = mock_object + + request = {} + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest @@ -8550,6 +10649,9 @@ def test_update_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8571,14 +10673,54 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): name="name_value", ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: - client.update_exclusion(request=request) - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest( + name="name_value", + ) + + +def test_update_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.update_exclusion + ] = mock_rpc + request = {} + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -8607,6 +10749,47 @@ async def test_update_exclusion_empty_call_async(): assert args[0] == logging_config.UpdateExclusionRequest() +@pytest.mark.asyncio +async def test_update_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_exclusion + ] = mock_object + + request = {} + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest @@ -8858,6 +11041,9 @@ def test_delete_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8881,6 +11067,9 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8889,6 +11078,43 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): ) +def test_delete_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_exclusion + ] = mock_rpc + request = {} + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8908,6 +11134,47 @@ async def test_delete_exclusion_empty_call_async(): assert args[0] == logging_config.DeleteExclusionRequest() +@pytest.mark.asyncio +async def test_delete_exclusion_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_exclusion + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_exclusion + ] = mock_object + + request = {} + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest @@ -9137,6 +11404,9 @@ def test_get_cmek_settings_empty_call(): with mock.patch.object( type(client.transport.get_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9162,6 +11432,9 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9170,6 +11443,43 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): ) +def test_get_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_cmek_settings + ] = mock_rpc + request = {} + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9198,6 +11508,47 @@ async def test_get_cmek_settings_empty_call_async(): assert args[0] == logging_config.GetCmekSettingsRequest() +@pytest.mark.asyncio +async def test_get_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_cmek_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_cmek_settings + ] = mock_object + + request = {} + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest @@ -9366,6 +11717,9 @@ def test_update_cmek_settings_empty_call(): with mock.patch.object( type(client.transport.update_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9391,6 +11745,9 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_cmek_settings), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9399,6 +11756,45 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): ) +def test_update_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_cmek_settings in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_cmek_settings + ] = mock_rpc + request = {} + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9427,6 +11823,47 @@ async def test_update_cmek_settings_empty_call_async(): assert args[0] == logging_config.UpdateCmekSettingsRequest() +@pytest.mark.asyncio +async def test_update_cmek_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_cmek_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_cmek_settings + ] = mock_object + + request = {} + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_cmek_settings_async( transport: str = "grpc_asyncio", @@ -9594,6 +12031,9 @@ def test_get_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9617,6 +12057,9 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9625,6 +12068,41 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): ) +def test_get_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + request = {} + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9652,6 +12130,47 @@ async def test_get_settings_empty_call_async(): assert args[0] == logging_config.GetSettingsRequest() +@pytest.mark.asyncio +async def test_get_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_settings + ] = mock_object + + request = {} + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest @@ -9896,6 +12415,9 @@ def test_update_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9919,6 +12441,9 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9927,6 +12452,41 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): ) +def test_update_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + request = {} + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9954,6 +12514,47 @@ async def test_update_settings_empty_call_async(): assert args[0] == logging_config.UpdateSettingsRequest() +@pytest.mark.asyncio +async def test_update_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_settings + ] = mock_object + + request = {} + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest @@ -10197,6 +12798,9 @@ def test_copy_log_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.copy_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10222,6 +12826,9 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.copy_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10232,6 +12839,47 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): ) +def test_copy_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.copy_log_entries + ] = mock_rpc + request = {} + client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_copy_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -10253,6 +12901,51 @@ async def test_copy_log_entries_empty_call_async(): assert args[0] == logging_config.CopyLogEntriesRequest() +@pytest.mark.asyncio +async def test_copy_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.copy_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.copy_log_entries + ] = mock_object + + request = {} + await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_copy_log_entries_async( transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index facbea0fa..b1cae4824 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -37,6 +37,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( @@ -1169,6 +1170,9 @@ def test_delete_log_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1192,6 +1196,9 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1200,6 +1207,41 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): ) +def test_delete_log_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc + request = {} + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1219,6 +1261,45 @@ async def test_delete_log_empty_call_async(): assert args[0] == logging.DeleteLogRequest() +@pytest.mark.asyncio +async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_log + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_log + ] = mock_object + + request = {} + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest @@ -1439,6 +1520,9 @@ def test_write_log_entries_empty_call(): with mock.patch.object( type(client.transport.write_log_entries), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1464,6 +1548,9 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.write_log_entries), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.write_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1472,6 +1559,43 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): ) +def test_write_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.write_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.write_log_entries + ] = mock_rpc + request = {} + client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_write_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1495,6 +1619,47 @@ async def test_write_log_entries_empty_call_async(): assert args[0] == logging.WriteLogEntriesRequest() +@pytest.mark.asyncio +async def test_write_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.write_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.write_log_entries + ] = mock_object + + request = {} + await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest @@ -1695,6 +1860,9 @@ def test_list_log_entries_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1720,6 +1888,9 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1730,6 +1901,43 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): ) +def test_list_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_log_entries + ] = mock_rpc + request = {} + client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1753,6 +1961,47 @@ async def test_list_log_entries_empty_call_async(): assert args[0] == logging.ListLogEntriesRequest() +@pytest.mark.asyncio +async def test_list_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_log_entries + ] = mock_object + + request = {} + await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest @@ -1931,10 +2180,14 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - pager = client.list_log_entries(request={}) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_log_entries(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2131,6 +2384,9 @@ def test_list_monitored_resource_descriptors_empty_call(): with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_monitored_resource_descriptors() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2156,6 +2412,9 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_monitored_resource_descriptors(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2164,6 +2423,46 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat ) +def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_monitored_resource_descriptors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_monitored_resource_descriptors + ] = mock_rpc + request = {} + client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2189,6 +2488,47 @@ async def test_list_monitored_resource_descriptors_empty_call_async(): assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_monitored_resource_descriptors + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_monitored_resource_descriptors + ] = mock_object + + request = {} + await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async( transport: str = "grpc_asyncio", @@ -2270,10 +2610,16 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () - pager = client.list_monitored_resource_descriptors(request={}) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_monitored_resource_descriptors( + request={}, retry=retry, timeout=timeout + ) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2480,6 +2826,9 @@ def test_list_logs_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2504,6 +2853,9 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_logs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2513,6 +2865,41 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): ) +def test_list_logs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_logs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc + request = {} + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_logs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2537,6 +2924,45 @@ async def test_list_logs_empty_call_async(): assert args[0] == logging.ListLogsRequest() +@pytest.mark.asyncio +async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_logs + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_logs + ] = mock_object + + request = {} + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest @@ -2758,13 +3184,17 @@ def test_list_logs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_logs(request={}) + pager = client.list_logs(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2945,6 +3375,84 @@ def test_tail_log_entries(request_type, transport: str = "grpc"): assert isinstance(message, logging.TailLogEntriesResponse) +def test_tail_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.tail_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.tail_log_entries + ] = mock_rpc + request = [{}] + client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.tail_log_entries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.tail_log_entries + ] = mock_object + + request = [{}] + await client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index abeaa4c6e..7909609fa 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -40,6 +40,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import ( @@ -1185,6 +1186,9 @@ def test_list_log_metrics_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_metrics() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1209,6 +1213,9 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_log_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1218,6 +1225,43 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): ) +def test_list_log_metrics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_metrics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_log_metrics + ] = mock_rpc + request = {} + client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_metrics_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1241,6 +1285,47 @@ async def test_list_log_metrics_empty_call_async(): assert args[0] == logging_metrics.ListLogMetricsRequest() +@pytest.mark.asyncio +async def test_list_log_metrics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_log_metrics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_log_metrics + ] = mock_object + + request = {} + await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest @@ -1460,13 +1545,17 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_log_metrics(request={}) + pager = client.list_log_metrics(request={}, retry=retry, timeout=timeout) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -1671,6 +1760,9 @@ def test_get_log_metric_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1694,6 +1786,9 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.get_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1702,6 +1797,41 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_get_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc + request = {} + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1731,6 +1861,47 @@ async def test_get_log_metric_empty_call_async(): assert args[0] == logging_metrics.GetLogMetricRequest() +@pytest.mark.asyncio +async def test_get_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_log_metric + ] = mock_object + + request = {} + await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest @@ -1987,6 +2158,9 @@ def test_create_log_metric_empty_call(): with mock.patch.object( type(client.transport.create_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2012,6 +2186,9 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2020,6 +2197,43 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_create_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_log_metric + ] = mock_rpc + request = {} + client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2051,6 +2265,47 @@ async def test_create_log_metric_empty_call_async(): assert args[0] == logging_metrics.CreateLogMetricRequest() +@pytest.mark.asyncio +async def test_create_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_log_metric + ] = mock_object + + request = {} + await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest @@ -2327,6 +2582,9 @@ def test_update_log_metric_empty_call(): with mock.patch.object( type(client.transport.update_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2352,6 +2610,9 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2360,6 +2621,43 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_update_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_log_metric + ] = mock_rpc + request = {} + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2391,6 +2689,47 @@ async def test_update_log_metric_empty_call_async(): assert args[0] == logging_metrics.UpdateLogMetricRequest() +@pytest.mark.asyncio +async def test_update_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_log_metric + ] = mock_object + + request = {} + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest @@ -2652,6 +2991,9 @@ def test_delete_log_metric_empty_call(): with mock.patch.object( type(client.transport.delete_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2677,6 +3019,9 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_log_metric), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2685,6 +3030,43 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): ) +def test_delete_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_log_metric + ] = mock_rpc + request = {} + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2706,6 +3088,47 @@ async def test_delete_log_metric_empty_call_async(): assert args[0] == logging_metrics.DeleteLogMetricRequest() +@pytest.mark.asyncio +async def test_delete_log_metric_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_log_metric + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_log_metric + ] = mock_object + + request = {} + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest From 5fcdbb6e74b67149f9d4e32247aec924138f22b7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 15:08:09 -0700 Subject: [PATCH 3/3] chore(main): release 3.11.1 (#927) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/logging/gapic_version.py | 2 +- google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 19f9217cb..2ecd919c7 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.11.0" + ".": "3.11.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 79fa00655..fe4702828 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.11.1](https://github.com/googleapis/python-logging/compare/v3.11.0...v3.11.1) (2024-08-06) + + +### Bug Fixes + +* Allow protobuf 5.x ([#888](https://github.com/googleapis/python-logging/issues/888)) ([7746e64](https://github.com/googleapis/python-logging/commit/7746e643af29b1008d6e6d6a9958c8337c958dd4)) + ## [3.11.0](https://github.com/googleapis/python-logging/compare/v3.10.0...v3.11.0) (2024-07-15) diff --git 
a/google/cloud/logging/gapic_version.py b/google/cloud/logging/gapic_version.py index 6c2e88f2b..f897ec818 100644 --- a/google/cloud/logging/gapic_version.py +++ b/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.0" # {x-release-please-version} +__version__ = "3.11.1" # {x-release-please-version} diff --git a/google/cloud/logging_v2/gapic_version.py b/google/cloud/logging_v2/gapic_version.py index 6c2e88f2b..f897ec818 100644 --- a/google/cloud/logging_v2/gapic_version.py +++ b/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.11.0" # {x-release-please-version} +__version__ = "3.11.1" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.logging.v2.json b/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6..263c808b8 100644 --- a/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.11.1" }, "snippets": [ {