From 265061eae8396caaef3fdfeae80e0a120f9a5cda Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 15:54:57 +0000 Subject: [PATCH 01/36] chore: use gapic-generator-python 0.62.1 (#478) - [ ] Regenerate this pull request now. fix: resolve DuplicateCredentialArgs error when using credentials_file committer: parthea PiperOrigin-RevId: 425964861 Source-Link: https://github.com/googleapis/googleapis/commit/84b1a5a4f6fb2d04905be58e586b8a7a4310a8cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/4fb761bbd8506ac156f49bac5f18306aa8eb3aa8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9 --- .../config_service_v2/async_client.py | 32 +++---- .../services/config_service_v2/client.py | 32 +++---- .../config_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../logging_service_v2/async_client.py | 8 +- .../services/logging_service_v2/client.py | 8 +- .../logging_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../metrics_service_v2/async_client.py | 10 +-- .../services/metrics_service_v2/client.py | 10 +-- .../metrics_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../cloud/logging_v2/types/logging_config.py | 3 +- .../logging_v2/test_config_service_v2.py | 84 +++++++++++++++++- .../logging_v2/test_logging_service_v2.py | 85 ++++++++++++++++++- .../logging_v2/test_metrics_service_v2.py | 85 ++++++++++++++++++- 16 files changed, 320 insertions(+), 67 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 664f10ada..81621a4e2 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -263,7 +263,7 @@ async def list_buckets( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -579,7 +579,7 @@ async def list_views( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -851,7 +851,7 @@ async def list_sinks( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -953,7 +953,7 @@ async def get_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1064,7 +1064,7 @@ async def create_sink( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: @@ -1186,7 +1186,7 @@ async def update_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: @@ -1277,7 +1277,7 @@ async def delete_sink( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1368,7 +1368,7 @@ async def list_exclusions( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1473,7 +1473,7 @@ async def get_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1585,7 +1585,7 @@ async def create_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: @@ -1700,7 +1700,7 @@ async def update_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: @@ -1778,7 +1778,7 @@ async def delete_exclusion( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1847,8 +1847,8 @@ async def get_cmek_settings( The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
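For context on the credentials_file fix this patch carries, here is a minimal sketch of the client setup that used to raise DuplicateCredentialArgs; the file name and the choice of ConfigServiceV2Client are illustrative, not taken from the diff.

from google.api_core.client_options import ClientOptions
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

# Supplying a credentials file through client options. Before this change the
# gRPC transport forwarded both the already-loaded credentials and the
# credentials_file path to create_channel, which grpc_helpers rejects as
# duplicate credential arguments; the transport now passes credentials_file=None
# and reuses the saved credentials.
options = ClientOptions(credentials_file="credentials.json")
client = ConfigServiceV2Client(client_options=options, transport="grpc")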
@@ -1923,8 +1923,8 @@ async def update_cmek_settings( The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index f4a1be57c..10bed9347 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -499,7 +499,7 @@ def list_buckets( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -820,7 +820,7 @@ def list_views( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1096,7 +1096,7 @@ def list_sinks( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1187,7 +1187,7 @@ def get_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1287,7 +1287,7 @@ def create_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: @@ -1409,7 +1409,7 @@ def update_sink( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: @@ -1489,7 +1489,7 @@ def delete_sink( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1569,7 +1569,7 @@ def list_exclusions( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1663,7 +1663,7 @@ def get_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1764,7 +1764,7 @@ def create_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: @@ -1879,7 +1879,7 @@ def update_exclusion( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: @@ -1957,7 +1957,7 @@ def delete_exclusion( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -2015,8 +2015,8 @@ def get_cmek_settings( The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2092,8 +2092,8 @@ def update_cmek_settings( The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index b34d0a121..39d9d4f93 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -159,8 +159,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 1cf4f3121..b4228c690 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -204,8 +204,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index e14453424..dc8b56b81 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -258,7 +258,7 @@ async def delete_log( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: @@ -426,7 +426,7 @@ async def write_log_entries( Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: @@ -558,7 +558,7 @@ async def list_log_entries( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: @@ -717,7 +717,7 @@ async def list_logs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 5815c8d19..b33821be5 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -440,7 +440,7 @@ def delete_log( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([log_name]) if request is not None and has_flattened_params: @@ -597,7 +597,7 @@ def write_log_entries( Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: @@ -717,7 +717,7 @@ def list_log_entries( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: @@ -857,7 +857,7 @@ def list_logs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 0379cbecf..4f5c9b1ca 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -159,8 +159,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 16602c2b4..27b094831 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -204,8 +204,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index eb7321ab7..311806df2 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -245,7 +245,7 @@ async def list_log_metrics( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -344,7 +344,7 @@ async def get_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: @@ -451,7 +451,7 @@ async def create_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: @@ -546,7 +546,7 @@ async def update_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: @@ -628,7 +628,7 @@ async def delete_log_metric( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index ced653a51..ade883811 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -439,7 +439,7 @@ def list_log_metrics( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -527,7 +527,7 @@ def get_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: @@ -623,7 +623,7 @@ def create_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: @@ -718,7 +718,7 @@ def update_log_metric( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: @@ -789,7 +789,7 @@ def delete_log_metric( sent along with the request as metadata. 
""" # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 194d341f3..7b72b756f 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -159,8 +159,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 37cec4a63..889d7072e 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -204,8 +204,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 3ea70506c..f064f26b7 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -120,8 +120,7 @@ class LogView(proto.Message): name (str): The resource name of the view. For example - "projects/my-project-id/locations/my- - location/buckets/my-bucket-id/views/my-view + "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view description (str): Describes this view. 
create_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index efb46eaad..e7d2ea7d1 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -522,21 +522,28 @@ def test_config_service_v2_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), ( ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_config_service_v2_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -572,6 +579,77 @@ def test_config_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ConfigServiceV2Client, + transports.ConfigServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ConfigServiceV2AsyncClient, + transports.ConfigServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_config_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
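# The block below patches google.auth.load_credentials_from_file and
# grpc_helpers.create_channel so the test can assert that the credentials
# loaded from the file are what reach create_channel, while credentials_file
# itself is forwarded as None, which is the behaviour this change introduces.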
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [logging_config.ListBucketsRequest, dict,]) def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 9f11a0210..0b3b202eb 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -529,21 +529,28 @@ def test_logging_service_v2_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), ( LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_logging_service_v2_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -579,6 +586,78 @@ def test_logging_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + LoggingServiceV2Client, + transports.LoggingServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + LoggingServiceV2AsyncClient, + transports.LoggingServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_logging_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [logging.DeleteLogRequest, dict,]) def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 97a2c4a99..764a76121 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -527,21 +527,28 @@ def test_metrics_service_v2_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), ( MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_metrics_service_v2_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -577,6 +584,78 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MetricsServiceV2Client, + transports.MetricsServiceV2GrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MetricsServiceV2AsyncClient, + transports.MetricsServiceV2GrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_metrics_service_v2_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [logging_metrics.ListLogMetricsRequest, dict,]) def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( From 387d46d2641677b91a315c5413881fa710d49286 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 5 Feb 2022 01:00:49 +0100 Subject: [PATCH 02/36] chore(deps): update all dependencies (#477) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index fbe6c1c5c..27df4634c 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==6.2.5 +pytest==7.0.0 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 0ab529f1c..7e49254f4 100644 --- 
a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ -google-cloud-logging==2.7.0 +google-cloud-logging==3.0.0 google-cloud-bigquery==2.32.0 -google-cloud-storage==2.0.0; python_version == '3.6' +google-cloud-storage==2.1.0; python_version == '3.6' google-cloud-storage==2.1.0; python_version >= '3.7' google-cloud-pubsub==2.9.0 From 96bb6f786c91656b52624fbbf52e036b1a908d53 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 10 Feb 2022 12:14:30 -0800 Subject: [PATCH 03/36] fix: fix system test for mtls (#485) --- tests/system/test_system.py | 80 ++++++++++++++++++++++++++++++++----- 1 file changed, 69 insertions(+), 11 deletions(-) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 90b4059d6..84d0c9552 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -110,6 +110,8 @@ def setUpModule(): # Skip the test cases using bigquery, storage and pubsub clients for mTLS testing. # Bigquery and storage uses http which doesn't have mTLS support, pubsub doesn't # have mTLS fix released yet. +# We also need to skip HTTP client test cases because mTLS is only available for +# gRPC clients. skip_for_mtls = pytest.mark.skipif( Config.use_mtls == "always", reason="Skip the test case for mTLS testing" ) @@ -196,7 +198,12 @@ def test_list_entry_with_auditlog(self): gapic_logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") http_logger = Config.HTTP_CLIENT.logger(f"audit-proto-{uuid.uuid1()}-http") - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: logger.log_proto(audit_struct) # retrieve log @@ -249,7 +256,12 @@ def test_list_entry_with_requestlog(self): gapic_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") http_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}-http") - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: logger.log_proto(req_struct) # retrieve log @@ -301,7 +313,12 @@ def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" gapic_logger = Config.CLIENT.logger(self._logger_name("log_text")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_text(TEXT_PAYLOAD) entries = _list_entries(logger) @@ -314,7 +331,12 @@ def test_log_text_with_timestamp(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) now = datetime.utcnow() - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_text(text_payload, timestamp=now) entries = _list_entries(logger) @@ -329,7 +351,12 @@ def test_log_text_with_resource(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) now = datetime.utcnow() - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else 
[gapic_logger, http_logger] + ) + for logger in loggers: resource = Resource( type="gae_app", labels={"module_id": "default", "version_id": "test", "zone": ""}, @@ -355,7 +382,12 @@ def test_log_text_w_metadata(self): REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_md")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_md_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_text( @@ -381,7 +413,12 @@ def test_log_text_w_metadata(self): def test_log_struct(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_struct(self.JSON_PAYLOAD) @@ -399,7 +436,12 @@ def test_log_struct_w_metadata(self): REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_md_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log_struct( @@ -423,7 +465,12 @@ def test_log_w_text(self): TEXT_PAYLOAD = "System test: test_log_w_text" gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_text")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_text")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log(TEXT_PAYLOAD) entries = _list_entries(logger) @@ -433,7 +480,12 @@ def test_log_w_text(self): def test_log_w_struct(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_struct_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log(self.JSON_PAYLOAD) @@ -446,7 +498,12 @@ def test_log_empty(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_empty")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_empty_http")) - for logger in [gapic_logger, http_logger]: + loggers = ( + [gapic_logger] + if Config.use_mtls == "always" + else [gapic_logger, http_logger] + ) + for logger in loggers: self.to_delete.append(logger) logger.log() @@ -829,6 +886,7 @@ def test_update_sink(self): self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + @skip_for_mtls def test_api_equality_list_logs(self): unique_id = uuid.uuid1() gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") From def7440ac6964451f3202b5117e3060ec62045b0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 11 Feb 2022 11:18:37 -0800 Subject: [PATCH 04/36] fix: remove unnecessary detect_resource calls from CloudLoggingHandler (#484) --- 
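As a sketch of where this change lands for users of CloudLoggingHandler, the handler now hands its resource straight to the transport instead of letting the transport detect it again; the client, log name, and resource below are placeholder values, not anything taken from this patch.

import google.cloud.logging
from google.cloud.logging.handlers import CloudLoggingHandler
from google.cloud.logging_v2.resource import Resource

client = google.cloud.logging.Client()
# The resource given (or detected) here is passed through to the transport's
# logger, so resource detection is no longer repeated by the transport.
resource = Resource(type="global", labels={})
handler = CloudLoggingHandler(client, name="example-log", resource=resource)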
google/cloud/logging_v2/_helpers.py | 5 +++-- google/cloud/logging_v2/handlers/handlers.py | 2 +- .../handlers/transports/background_thread.py | 7 ++++++- google/cloud/logging_v2/handlers/transports/base.py | 13 +++++++++++++ google/cloud/logging_v2/handlers/transports/sync.py | 13 +++++++++++-- tests/unit/handlers/test_handlers.py | 2 +- .../handlers/transports/test_background_thread.py | 7 ++++--- tests/unit/handlers/transports/test_base.py | 7 +++++-- tests/unit/handlers/transports/test_sync.py | 7 ++++--- 9 files changed, 48 insertions(+), 15 deletions(-) diff --git a/google/cloud/logging_v2/_helpers.py b/google/cloud/logging_v2/_helpers.py index 51cc64868..75f84e50c 100644 --- a/google/cloud/logging_v2/_helpers.py +++ b/google/cloud/logging_v2/_helpers.py @@ -89,7 +89,7 @@ def entry_from_resource(resource, client, loggers): return LogEntry.from_api_repr(resource, client, loggers=loggers) -def retrieve_metadata_server(metadata_key): +def retrieve_metadata_server(metadata_key, timeout=5): """Retrieve the metadata key in the metadata server. See: https://cloud.google.com/compute/docs/storing-retrieving-metadata @@ -99,6 +99,7 @@ def retrieve_metadata_server(metadata_key): Key of the metadata which will form the url. You can also supply query parameters after the metadata key. e.g. "tags?alt=json" + timeout (number): number of seconds to wait for the HTTP request Returns: str: The value of the metadata key returned by the metadata server. @@ -106,7 +107,7 @@ def retrieve_metadata_server(metadata_key): url = METADATA_URL + metadata_key try: - response = requests.get(url, headers=METADATA_HEADERS) + response = requests.get(url, headers=METADATA_HEADERS, timeout=timeout) if response.status_code == requests.codes.ok: return response.text diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 769146007..f6fa90d71 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -179,7 +179,7 @@ def __init__( resource = detect_resource(client.project) self.name = name self.client = client - self.transport = transport(client, name) + self.transport = transport(client, name, resource=resource) self.project_id = client.project self.resource = resource self.labels = labels diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py b/google/cloud/logging_v2/handlers/transports/background_thread.py index 1097830a8..f361e043c 100644 --- a/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -29,6 +29,7 @@ from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE _DEFAULT_GRACE_PERIOD = 5.0 # Seconds _DEFAULT_MAX_BATCH_SIZE = 10 @@ -260,6 +261,8 @@ def __init__( grace_period=_DEFAULT_GRACE_PERIOD, batch_size=_DEFAULT_MAX_BATCH_SIZE, max_latency=_DEFAULT_MAX_LATENCY, + resource=_GLOBAL_RESOURCE, + **kwargs, ): """ Args: @@ -275,9 +278,11 @@ def __init__( than the grace_period. This means this is effectively the longest amount of time the background thread will hold onto log entries before sending them to the server. 
+ resource (Optional[Resource|dict]): The default monitored resource to associate + with logs when not specified """ self.client = client - logger = self.client.logger(name) + logger = self.client.logger(name, resource=resource) self.worker = _Worker( logger, grace_period=grace_period, diff --git a/google/cloud/logging_v2/handlers/transports/base.py b/google/cloud/logging_v2/handlers/transports/base.py index bd52b4e75..a0c9aafa4 100644 --- a/google/cloud/logging_v2/handlers/transports/base.py +++ b/google/cloud/logging_v2/handlers/transports/base.py @@ -14,6 +14,8 @@ """Module containing base class for logging transport.""" +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + class Transport(object): """Base class for Google Cloud Logging handler transports. @@ -22,6 +24,17 @@ class Transport(object): client and name object, and must override :meth:`send`. """ + def __init__(self, client, name, resource=_GLOBAL_RESOURCE, **kwargs): + """ + Args: + client (~logging_v2.client.Client): + The Logging client. + name (str): The name of the lgoger. + resource (Optional[Resource|dict]): The default monitored resource to associate + with logs when not specified + """ + super().__init__() + def send(self, record, message, **kwargs): """Transport send to be implemented by subclasses. diff --git a/google/cloud/logging_v2/handlers/transports/sync.py b/google/cloud/logging_v2/handlers/transports/sync.py index 796f0d2ff..6f93b2e57 100644 --- a/google/cloud/logging_v2/handlers/transports/sync.py +++ b/google/cloud/logging_v2/handlers/transports/sync.py @@ -18,6 +18,7 @@ """ from google.cloud.logging_v2 import _helpers from google.cloud.logging_v2.handlers.transports.base import Transport +from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE class SyncTransport(Transport): @@ -26,8 +27,16 @@ class SyncTransport(Transport): Uses this library's Logging client to directly make the API call. """ - def __init__(self, client, name): - self.logger = client.logger(name) + def __init__(self, client, name, resource=_GLOBAL_RESOURCE, **kwargs): + """ + Args: + client (~logging_v2.client.Client): + The Logging client. + name (str): The name of the lgoger. + resource (Optional[Resource|dict]): The default monitored resource to associate + with logs when not specified + """ + self.logger = client.logger(name, resource=resource) def send(self, record, message, **kwargs): """Overrides transport.send(). 
diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index bbfacf59f..353e7d2f6 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -860,7 +860,7 @@ def __init__(self, project): class _Transport(object): - def __init__(self, client, name): + def __init__(self, client, name, resource=None): self.client = client self.name = name diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index f408de476..0c547d736 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -509,11 +509,12 @@ def commit(self): class _Logger(object): - def __init__(self, name): + def __init__(self, name, resource=None): self.name = name self._batch_cls = _Batch self._batch = None self._num_batches = 0 + self.resource = resource def batch(self): self._batch = self._batch_cls() @@ -530,6 +531,6 @@ def __init__(self, project, _http=None, credentials=None): self._credentials = credentials self._connection = mock.Mock(credentials=credentials, spec=["credentials"]) - def logger(self, name): # pylint: disable=unused-argument - self._logger = _Logger(name) + def logger(self, name, resource=None): # pylint: disable=unused-argument + self._logger = _Logger(name, resource=resource) return self._logger diff --git a/tests/unit/handlers/transports/test_base.py b/tests/unit/handlers/transports/test_base.py index 4cbfab02e..71ef1366a 100644 --- a/tests/unit/handlers/transports/test_base.py +++ b/tests/unit/handlers/transports/test_base.py @@ -29,10 +29,13 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_send_is_abstract(self): - target = self._make_one() + target = self._make_one("client", "name") with self.assertRaises(NotImplementedError): target.send(None, None, resource=None) + def test_resource_is_valid_argunent(self): + self._make_one("client", "name", resource="resource") + def test_flush_is_abstract_and_optional(self): - target = self._make_one() + target = self._make_one("client", "name") target.flush() diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index cc8ffe284..bdc78d89a 100644 --- a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -91,8 +91,9 @@ def test_send_struct(self): class _Logger(object): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE - def __init__(self, name): + def __init__(self, name, resource=_GLOBAL_RESOURCE): self.name = name + self.resource = resource def log( self, @@ -119,8 +120,8 @@ class _Client(object): def __init__(self, project): self.project = project - def logger(self, name): # pylint: disable=unused-argument - self._logger = _Logger(name) + def logger(self, name, resource=None): # pylint: disable=unused-argument + self._logger = _Logger(name, resource=resource) return self._logger From 217436d3de7d90704576645534e459470d48e31c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 14 Feb 2022 16:51:54 +0100 Subject: [PATCH 05/36] chore(deps): update dependency pytest to v7.0.1 (#490) --- samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 27df4634c..c531e813e 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 
@@ backoff==1.11.1 -pytest==7.0.0 +pytest==7.0.1 From ab145630ffbb25a88cc058569b9e425e62b32ced Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 7 Mar 2022 16:54:42 -0500 Subject: [PATCH 06/36] fix(deps): require google-api-core>=1.31.5, >=2.3.2 (#494) --- setup.py | 2 +- testing/constraints-3.6.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 2cf113e34..2b39d7bbd 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", "google-cloud-appengine-logging>=0.1.0, <2.0.0dev", "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", # NOTE: Maintainers, please do not require google-api-core>=2.x.x diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 250c505ff..0aa016644 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,6 +5,6 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.28.0 +google-api-core==1.31.5 google-cloud-core==1.4.1 proto-plus==1.15.0 From 6699f8c545d1a9904a945a9d789d7220da9433bf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Mar 2022 01:00:22 +0000 Subject: [PATCH 07/36] feat: KMS configuration in settings (#489) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 431037888 Source-Link: https://github.com/googleapis/googleapis/commit/b3397f5febbf21dfc69b875ddabaf76bee765058 Source-Link: https://github.com/googleapis/googleapis-gen/commit/510b54e1cdefd53173984df16645081308fe897e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTEwYjU0ZTFjZGVmZDUzMTczOTg0ZGYxNjY0NTA4MTMwOGZlODk3ZSJ9 chore: use gapic-generator-python 0.63.4 chore: fix snippet region tag format chore: fix docstring code block formatting PiperOrigin-RevId: 430730865 Source-Link: https://github.com/googleapis/googleapis/commit/ea5800229f73f94fd7204915a86ed09dcddf429a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca893ff8af25fc7fe001de1405a517d80446ecca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2E4OTNmZjhhZjI1ZmM3ZmUwMDFkZTE0MDVhNTE3ZDgwNDQ2ZWNjYSJ9 chore: formatting changes PiperOrigin-RevId: 430243637 Source-Link: https://github.com/googleapis/googleapis/commit/95da686e8840cf3edf872ce3d095967e24e41bf6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a1f056b7689ccbe5aebc0bfdd318e9945ee7602a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTFmMDU2Yjc2ODljY2JlNWFlYmMwYmZkZDMxOGU5OTQ1ZWU3NjAyYSJ9 feat: Update Logging API with latest changes PiperOrigin-RevId: 429289471 Source-Link: https://github.com/googleapis/googleapis/commit/acd5f89b8addd2ff54f41a7d43ff9b122bb43337 Source-Link: https://github.com/googleapis/googleapis-gen/commit/8a12622536ae2e9a8978198a151e89234b839b20 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOGExMjYyMjUzNmFlMmU5YTg5NzgxOThhMTUxZTg5MjM0YjgzOWIyMCJ9 chore: use gapic-generator-python 0.63.2 docs: add generated snippets chore: update copyright year to 2022 PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 --- google/cloud/logging_v2/gapic_metadata.json | 30 + google/cloud/logging_v2/services/__init__.py | 2 +- .../services/config_service_v2/__init__.py | 2 +- .../config_service_v2/async_client.py | 1089 +++++- .../services/config_service_v2/client.py | 1099 +++++- .../services/config_service_v2/pagers.py | 2 +- .../config_service_v2/transports/__init__.py | 2 +- .../config_service_v2/transports/base.py | 45 +- .../config_service_v2/transports/grpc.py | 221 +- .../transports/grpc_asyncio.py | 229 +- .../services/logging_service_v2/__init__.py | 2 +- .../logging_service_v2/async_client.py | 215 +- .../services/logging_service_v2/client.py | 215 +- .../services/logging_service_v2/pagers.py | 2 +- .../logging_service_v2/transports/__init__.py | 2 +- .../logging_service_v2/transports/base.py | 2 +- .../logging_service_v2/transports/grpc.py | 12 +- .../transports/grpc_asyncio.py | 12 +- .../services/metrics_service_v2/__init__.py | 2 +- .../metrics_service_v2/async_client.py | 105 +- .../services/metrics_service_v2/client.py | 105 +- .../services/metrics_service_v2/pagers.py | 2 +- .../metrics_service_v2/transports/__init__.py | 2 +- .../metrics_service_v2/transports/base.py | 2 +- .../metrics_service_v2/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- google/cloud/logging_v2/types/__init__.py | 18 +- google/cloud/logging_v2/types/log_entry.py | 57 +- google/cloud/logging_v2/types/logging.py | 103 +- .../cloud/logging_v2/types/logging_config.py | 606 ++- .../cloud/logging_v2/types/logging_metrics.py | 18 +- ...onfig_service_v2_copy_log_entries_async.py | 50 + ...config_service_v2_copy_log_entries_sync.py | 50 + ...d_config_service_v2_create_bucket_async.py | 46 + ...ed_config_service_v2_create_bucket_sync.py | 46 + ...onfig_service_v2_create_exclusion_async.py | 50 + ...config_service_v2_create_exclusion_sync.py | 50 + ...ted_config_service_v2_create_sink_async.py | 50 + ...ated_config_service_v2_create_sink_sync.py | 50 + ...ted_config_service_v2_create_view_async.py | 46 + ...ated_config_service_v2_create_view_sync.py | 46 + ...d_config_service_v2_delete_bucket_async.py | 43 + ...ed_config_service_v2_delete_bucket_sync.py | 43 + ...onfig_service_v2_delete_exclusion_async.py | 43 + ...config_service_v2_delete_exclusion_sync.py | 43 + ...ted_config_service_v2_delete_sink_async.py | 43 + ...ated_config_service_v2_delete_sink_sync.py | 43 + ...ted_config_service_v2_delete_view_async.py | 43 + ...ated_config_service_v2_delete_view_sync.py | 43 + ...ated_config_service_v2_get_bucket_async.py | 45 + ...rated_config_service_v2_get_bucket_sync.py | 45 + ...nfig_service_v2_get_cmek_settings_async.py | 45 + ...onfig_service_v2_get_cmek_settings_sync.py | 45 + ...d_config_service_v2_get_exclusion_async.py | 45 + ...ed_config_service_v2_get_exclusion_sync.py | 45 + ...ed_config_service_v2_get_settings_async.py | 45 + ...ted_config_service_v2_get_settings_sync.py | 45 + ...erated_config_service_v2_get_sink_async.py | 45 + ...nerated_config_service_v2_get_sink_sync.py | 45 + ...erated_config_service_v2_get_view_async.py | 45 + ...nerated_config_service_v2_get_view_sync.py | 45 + ...ed_config_service_v2_list_buckets_async.py | 46 + ...ted_config_service_v2_list_buckets_sync.py | 46 + ...config_service_v2_list_exclusions_async.py | 46 + ..._config_service_v2_list_exclusions_sync.py | 46 + ...ated_config_service_v2_list_sinks_async.py | 46 + ...rated_config_service_v2_list_sinks_sync.py | 46 + 
...ated_config_service_v2_list_views_async.py | 46 + ...rated_config_service_v2_list_views_sync.py | 46 + ...config_service_v2_undelete_bucket_async.py | 43 + ..._config_service_v2_undelete_bucket_sync.py | 43 + ...d_config_service_v2_update_bucket_async.py | 45 + ...ed_config_service_v2_update_bucket_sync.py | 45 + ...g_service_v2_update_cmek_settings_async.py | 45 + ...ig_service_v2_update_cmek_settings_sync.py | 45 + ...onfig_service_v2_update_exclusion_async.py | 50 + ...config_service_v2_update_exclusion_sync.py | 50 + ...config_service_v2_update_settings_async.py | 45 + ..._config_service_v2_update_settings_sync.py | 45 + ...ted_config_service_v2_update_sink_async.py | 50 + ...ated_config_service_v2_update_sink_sync.py | 50 + ...ted_config_service_v2_update_view_async.py | 45 + ...ated_config_service_v2_update_view_sync.py | 45 + ...ted_logging_service_v2_delete_log_async.py | 43 + ...ated_logging_service_v2_delete_log_sync.py | 43 + ...gging_service_v2_list_log_entries_async.py | 46 + ...ogging_service_v2_list_log_entries_sync.py | 46 + ...ated_logging_service_v2_list_logs_async.py | 46 + ...rated_logging_service_v2_list_logs_sync.py | 46 + ...st_monitored_resource_descriptors_async.py | 45 + ...ist_monitored_resource_descriptors_sync.py | 45 + ...gging_service_v2_tail_log_entries_async.py | 56 + ...ogging_service_v2_tail_log_entries_sync.py | 56 + ...ging_service_v2_write_log_entries_async.py | 48 + ...gging_service_v2_write_log_entries_sync.py | 48 + ...rics_service_v2_create_log_metric_async.py | 50 + ...trics_service_v2_create_log_metric_sync.py | 50 + ...rics_service_v2_delete_log_metric_async.py | 43 + ...trics_service_v2_delete_log_metric_sync.py | 43 + ...metrics_service_v2_get_log_metric_async.py | 45 + ..._metrics_service_v2_get_log_metric_sync.py | 45 + ...trics_service_v2_list_log_metrics_async.py | 46 + ...etrics_service_v2_list_log_metrics_sync.py | 46 + ...rics_service_v2_update_log_metric_async.py | 50 + ...trics_service_v2_update_log_metric_sync.py | 50 + .../snippet_metadata_logging_v2.json | 3269 +++++++++++++++++ tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- tests/unit/gapic/logging_v2/__init__.py | 2 +- .../logging_v2/test_config_service_v2.py | 627 +++- .../logging_v2/test_logging_service_v2.py | 2 +- .../logging_v2/test_metrics_service_v2.py | 14 +- 113 files changed, 10854 insertions(+), 691 deletions(-) create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py create mode 100644 
samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py create mode 100644 
samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py create mode 100644 samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py create mode 100644 samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py create mode 100644 samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py create mode 100644 samples/generated_samples/snippet_metadata_logging_v2.json diff --git a/google/cloud/logging_v2/gapic_metadata.json 
b/google/cloud/logging_v2/gapic_metadata.json index da4eefd47..a629e5a50 100644 --- a/google/cloud/logging_v2/gapic_metadata.json +++ b/google/cloud/logging_v2/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "ConfigServiceV2Client", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" @@ -65,6 +70,11 @@ "get_exclusion" ] }, + "GetSettings": { + "methods": [ + "get_settings" + ] + }, "GetSink": { "methods": [ "get_sink" @@ -115,6 +125,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" @@ -130,6 +145,11 @@ "grpc-async": { "libraryClient": "ConfigServiceV2AsyncClient", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" @@ -185,6 +205,11 @@ "get_exclusion" ] }, + "GetSettings": { + "methods": [ + "get_settings" + ] + }, "GetSink": { "methods": [ "get_sink" @@ -235,6 +260,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" diff --git a/google/cloud/logging_v2/services/__init__.py b/google/cloud/logging_v2/services/__init__.py index 4de65971c..e8e1c3845 100644 --- a/google/cloud/logging_v2/services/__init__.py +++ b/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/__init__.py b/google/cloud/logging_v2/services/config_service_v2/__init__.py index e7f604280..6eb3681ce 100644 --- a/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 81621a4e2..de3a6bbb7 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
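The gapic_metadata.json hunks above only register the new CopyLogEntries, GetSettings, and UpdateSettings RPCs against their generated method names. As a quick sanity check that the mapping round-trips, the metadata can be read back; the sketch below assumes the file keeps the usual gapic metadata layout (``services`` -> service name -> ``clients`` -> transport -> ``rpcs``), of which only the ``rpcs`` portion is visible in this diff.

.. code-block:: python

    import json

    # Read back the RPC -> method mapping that gapic_metadata.json declares
    # for ConfigServiceV2 over the plain gRPC transport. The outer keys
    # ("services", "clients") are assumed from the usual gapic metadata
    # schema; only the "rpcs" section appears in the hunks above.
    with open("google/cloud/logging_v2/gapic_metadata.json") as f:
        metadata = json.load(f)

    rpcs = metadata["services"]["ConfigServiceV2"]["clients"]["grpc"]["rpcs"]
    for rpc_name, entry in sorted(rpcs.items()):
        print(rpc_name, "->", ", ".join(entry["methods"]))

    # After this patch the output should include, among others:
    #   CopyLogEntries -> copy_log_entries
    #   GetSettings -> get_settings
    #   UpdateSettings -> update_settings
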
@@ -31,6 +31,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -62,6 +64,8 @@ class ConfigServiceV2AsyncClient: parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) + settings_path = staticmethod(ConfigServiceV2Client.settings_path) + parse_settings_path = staticmethod(ConfigServiceV2Client.parse_settings_path) common_billing_account_path = staticmethod( ConfigServiceV2Client.common_billing_account_path ) @@ -225,7 +229,27 @@ async def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets. + r"""Lists log buckets. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): @@ -313,7 +337,26 @@ async def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): @@ -326,7 +369,9 @@ async def get_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.GetBucketRequest(request) @@ -359,9 +404,30 @@ async def create_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): @@ -374,7 +440,9 @@ async def create_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.CreateBucketRequest(request) @@ -407,17 +475,38 @@ async def update_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + r"""Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + .. code-block:: python - A buckets region may not be modified after it is created. + from google.cloud import logging_v2 + + def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): @@ -430,7 +519,9 @@ async def update_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.UpdateBucketRequest(request) @@ -463,9 +554,29 @@ async def delete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -506,8 +617,26 @@ async def undelete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. 
A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -549,7 +678,27 @@ async def list_views( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): @@ -629,7 +778,26 @@ async def get_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client.get_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): @@ -642,8 +810,8 @@ async def get_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -677,8 +845,29 @@ async def create_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): @@ -691,8 +880,8 @@ async def create_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -726,8 +915,31 @@ async def update_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. 
+ r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): @@ -740,8 +952,8 @@ async def update_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -775,7 +987,27 @@ async def delete_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -819,6 +1051,26 @@ async def list_sinks( ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. @@ -915,6 +1167,25 @@ async def get_sink( ) -> logging_config.LogSink: r"""Gets a sink. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. @@ -928,7 +1199,9 @@ async def get_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -944,12 +1217,12 @@ async def get_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1018,6 +1291,31 @@ async def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. @@ -1031,8 +1329,9 @@ async def create_sink( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1055,12 +1354,12 @@ async def create_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1120,6 +1419,31 @@ async def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. 
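As the updated ``create_sink``/``update_sink`` docstrings above note, each generated method accepts either a fully populated request object or the documented flattened fields, but not both; mixing them raises ``ValueError``. A minimal sketch of the flattened calling form for ``create_sink`` (not part of the patch; all resource names are placeholders):

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    # Flattened form: pass `parent` and `sink` directly instead of building
    # a CreateSinkRequest. Passing a request object *and* these keyword
    # arguments raises ValueError. All resource names below are placeholders.
    sink = logging_v2.LogSink(
        name="my-sink",
        destination="storage.googleapis.com/my-gcs-bucket",
    )
    created = client.create_sink(parent="projects/my-project", sink=sink)

    # The service fills in the sink's writer identity on creation.
    print(created.writer_identity)
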
@@ -1134,7 +1458,9 @@ async def update_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1152,16 +1478,18 @@ async def update_sink( overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1177,12 +1505,12 @@ async def update_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1251,6 +1579,23 @@ async def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. @@ -1265,7 +1610,9 @@ async def delete_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1334,7 +1681,29 @@ async def list_exclusions( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: - r"""Lists all the exclusions in a parent resource. + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): @@ -1430,7 +1799,26 @@ async def get_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + r"""Gets the description of an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): @@ -1445,8 +1833,9 @@ async def get_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1459,17 +1848,13 @@ async def get_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1530,10 +1915,34 @@ async def create_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): @@ -1549,8 +1958,10 @@ async def create_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1571,17 +1982,13 @@ async def create_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1634,8 +2041,33 @@ async def update_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): @@ -1650,8 +2082,9 @@ async def update_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1686,17 +2119,13 @@ async def update_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. 
If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1749,7 +2178,23 @@ async def delete_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an exclusion. + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -1765,8 +2210,9 @@ async def delete_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1832,21 +2278,42 @@ async def get_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. + r"""Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1861,12 +2328,12 @@ async def get_cmek_settings( a project, folder, organization, billing account, or flexible resource. 
- Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -1902,11 +2369,11 @@ async def update_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. + r"""Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -1914,15 +2381,35 @@ async def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1937,12 +2424,12 @@ async def update_cmek_settings( a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -1970,6 +2457,340 @@ async def update_cmek_settings( # Done; return the response. return response + async def get_settings( + self, + request: Union[logging_config.GetSettingsRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. 
+ + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + See [Enabling CMEK for Log + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + name (:class:`str`): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = logging_config.GetSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def update_settings( + self, + request: Union[logging_config.UpdateSettingsRequest, dict] = None, + *, + settings: logging_config.Settings = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): + The request object. The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + See [Enabling CMEK for Log + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + settings (:class:`google.cloud.logging_v2.types.Settings`): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = logging_config.UpdateSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def copy_log_entries( + self, + request: Union[logging_config.CopyLogEntriesRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.copy_log_entries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. 
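The new async methods above (``get_settings``, ``update_settings`` and ``copy_log_entries``) follow the same calling convention as the rest of the surface: pass either a full request object or the flattened fields (where offered), never both. Below is a minimal sketch of awaiting the new ``get_settings`` RPC, assuming the async surface is exported as ``ConfigServiceV2AsyncClient`` (the conventional GAPIC name, which this hunk implies but does not show) and that Application Default Credentials are available.

.. code-block:: python

    import asyncio

    from google.cloud.logging_v2.services.config_service_v2 import (
        ConfigServiceV2AsyncClient,
    )


    async def show_org_settings(organization_id: str) -> None:
        # Class name assumed from GAPIC conventions; only its new method
        # bodies appear in this patch.
        client = ConfigServiceV2AsyncClient()

        # Flattened arguments and a full request object are mutually
        # exclusive; passing both raises ValueError (see the check above).
        settings = await client.get_settings(
            name=f"organizations/{organization_id}/settings",
        )
        print(settings)


    asyncio.run(show_org_settings("123456789"))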
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index 10bed9347..041b1c838 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,6 +34,8 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -228,6 +230,17 @@ def parse_log_view_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def settings_path(project: str,) -> str: + """Returns a fully-qualified settings string.""" + return "projects/{project}/settings".format(project=project,) + + @staticmethod + def parse_settings_path(path: str) -> Dict[str, str]: + """Parses a settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/settings$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str,) -> str: """Returns a fully-qualified billing_account string.""" @@ -461,7 +474,27 @@ def list_buckets( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: - r"""Lists buckets. + r"""Lists log buckets. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): @@ -549,7 +582,26 @@ def get_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): @@ -562,7 +614,9 @@ def get_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -596,9 +650,30 @@ def create_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. 
+ r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): @@ -611,7 +686,9 @@ def create_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -645,17 +722,38 @@ def update_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + r"""Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + .. code-block:: python - A buckets region may not be modified after it is created. + from google.cloud import logging_v2 + + def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): @@ -668,7 +766,9 @@ def update_bucket( Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -702,9 +802,29 @@ def delete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -746,8 +866,26 @@ def undelete_bucket( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -790,7 +928,27 @@ def list_views( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): @@ -870,7 +1028,26 @@ def get_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client.get_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): @@ -883,8 +1060,8 @@ def get_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -919,8 +1096,29 @@ def create_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): @@ -933,8 +1131,8 @@ def create_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -969,8 +1167,31 @@ def update_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): @@ -983,8 +1204,8 @@ def update_view( Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1019,7 +1240,27 @@ def delete_view( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1064,6 +1305,26 @@ def list_sinks( ) -> pagers.ListSinksPager: r"""Lists sinks. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. @@ -1149,6 +1410,25 @@ def get_sink( ) -> logging_config.LogSink: r"""Gets a sink. + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. @@ -1162,7 +1442,9 @@ def get_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1178,12 +1460,12 @@ def get_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1241,6 +1523,31 @@ def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. @@ -1254,8 +1561,9 @@ def create_sink( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1278,12 +1586,12 @@ def create_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1343,6 +1651,31 @@ def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. @@ -1357,7 +1690,9 @@ def update_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1375,16 +1710,18 @@ def update_sink( overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1400,12 +1737,12 @@ def update_sink( Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1463,6 +1800,23 @@ def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. @@ -1477,7 +1831,9 @@ def delete_sink( "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1535,7 +1891,29 @@ def list_exclusions( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: - r"""Lists all the exclusions in a parent resource. + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): @@ -1620,7 +1998,26 @@ def get_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + r"""Gets the description of an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): @@ -1635,8 +2032,9 @@ def get_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1649,17 +2047,13 @@ def get_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1709,10 +2103,34 @@ def create_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): @@ -1728,8 +2146,10 @@ def create_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1750,17 +2170,13 @@ def create_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1813,8 +2229,33 @@ def update_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): @@ -1829,8 +2270,9 @@ def update_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1865,17 +2307,13 @@ def update_exclusion( Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. 
Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -1928,7 +2366,23 @@ def delete_exclusion( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes an exclusion. + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -1944,8 +2398,9 @@ def delete_exclusion( "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2000,21 +2455,42 @@ def get_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. + r"""Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2029,12 +2505,12 @@ def get_cmek_settings( a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. 
Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -2071,11 +2547,11 @@ def update_cmek_settings( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. + r"""Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -2083,15 +2559,35 @@ def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -2106,12 +2602,12 @@ def update_cmek_settings( a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - See [Enabling CMEK for Logs + See [Enabling CMEK for Log Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. @@ -2140,6 +2636,341 @@ def update_cmek_settings( # Done; return the response. return response + def get_settings( + self, + request: Union[logging_config.GetSettingsRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. 
Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + See [Enabling CMEK for Log + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
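On the synchronous client, ``get_settings`` pairs naturally with the new ``settings_path()`` helper added earlier in this file, which only covers the project-level form of the resource name. A short sketch, assuming Application Default Credentials; the project ID is a placeholder.

.. code-block:: python

    from google.cloud import logging_v2

    client = logging_v2.ConfigServiceV2Client()

    # settings_path() builds "projects/{project}/settings"; organization,
    # folder and billing-account settings names must be built by hand.
    name = client.settings_path("my-project")

    # Equivalent to passing GetSettingsRequest(name=name) as `request`.
    settings = client.get_settings(name=name)
    print(settings)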
+ return response + + def update_settings( + self, + request: Union[logging_config.UpdateSettingsRequest, dict] = None, + *, + settings: logging_config.Settings = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): + The request object. The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + See [Enabling CMEK for Log + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateSettingsRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def copy_log_entries( + self, + request: Union[logging_config.CopyLogEntriesRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CopyLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. 
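For ``update_settings``, the docstring above implies that ``kms_key_name`` is the field most commonly named in the update mask; the ``Settings`` message itself is defined outside this patch, so its top-level export and field names are assumptions here. A hedged sketch that updates only the fields named in the mask:

.. code-block:: python

    from google.cloud import logging_v2
    from google.protobuf import field_mask_pb2

    client = logging_v2.ConfigServiceV2Client()

    # Settings fields are not part of this patch; kms_key_name is assumed
    # from the "updateMask=kmsKeyName" example in the docstring above, and
    # the key name below is a placeholder.
    settings = logging_v2.Settings(
        kms_key_name=(
            "projects/my-project/locations/us/keyRings/my-ring/cryptoKeys/my-key"
        ),
    )

    request = logging_v2.UpdateSettingsRequest(
        name="organizations/123456789/settings",
        settings=settings,
        update_mask=field_mask_pb2.FieldMask(paths=["kms_key_name"]),
    )

    # Only the fields named in update_mask are overwritten.
    response = client.update_settings(request=request)
    print(response)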
+ return response + def __enter__(self): return self diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py index b0be053e4..3c5ce7754 100644 --- a/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index b1e24fc64..93a29df09 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 90e305488..6dfc1fd2f 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -22,10 +22,12 @@ from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore try: @@ -286,6 +288,15 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_settings: gapic_v1.method.wrap_method( + self.get_settings, default_timeout=None, client_info=client_info, + ), + self.update_settings: gapic_v1.method.wrap_method( + self.update_settings, default_timeout=None, client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method.wrap_method( + self.copy_log_entries, default_timeout=None, client_info=client_info, + ), } def close(self): @@ -297,6 +308,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_buckets( self, @@ -516,5 +532,32 @@ def update_cmek_settings( ]: raise NotImplementedError() + @property + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: + raise NotImplementedError() + + @property + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[logging_config.Settings, Awaitable[logging_config.Settings]], + ]: + raise NotImplementedError() + + @property + def 
copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + __all__ = ("ConfigServiceV2Transport",) diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 39d9d4f93..301334f80 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers +from google.api_core import operations_v1 from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -25,6 +26,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -111,6 +113,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -229,6 +232,20 @@ def grpc_channel(self) -> grpc.Channel: """ return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient(self.grpc_channel) + + # Return the client from cache. + return self._operations_client + @property def list_buckets( self, @@ -237,7 +254,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -263,7 +280,7 @@ def get_bucket( ) -> Callable[[logging_config.GetBucketRequest], logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -289,9 +306,9 @@ def create_bucket( ) -> Callable[[logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -317,17 +334,18 @@ def update_bucket( ) -> Callable[[logging_config.UpdateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. 
This method replaces the following fields in - the existing bucket with values from the new bucket: + Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -353,9 +371,12 @@ def delete_bucket( ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -381,8 +402,9 @@ def undelete_bucket( ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -408,7 +430,7 @@ def list_views( ) -> Callable[[logging_config.ListViewsRequest], logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -434,7 +456,7 @@ def get_view( ) -> Callable[[logging_config.GetViewRequest], logging_config.LogView]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket.. Returns: Callable[[~.GetViewRequest], @@ -460,8 +482,8 @@ def create_view( ) -> Callable[[logging_config.CreateViewRequest], logging_config.LogView]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. Returns: Callable[[~.CreateViewRequest], @@ -487,8 +509,11 @@ def update_view( ) -> Callable[[logging_config.UpdateViewRequest], logging_config.LogView]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. 
Returns: Callable[[~.UpdateViewRequest], @@ -514,7 +539,10 @@ def delete_view( ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. Returns: Callable[[~.DeleteViewRequest], @@ -682,7 +710,8 @@ def list_exclusions( ]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -708,7 +737,7 @@ def get_exclusion( ) -> Callable[[logging_config.GetExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -734,10 +763,9 @@ def create_exclusion( ) -> Callable[[logging_config.CreateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -763,8 +791,8 @@ def update_exclusion( ) -> Callable[[logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. Returns: Callable[[~.UpdateExclusionRequest], @@ -790,7 +818,7 @@ def delete_exclusion( ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -816,13 +844,14 @@ def get_cmek_settings( ) -> Callable[[logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -852,11 +881,11 @@ def update_cmek_settings( ]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. 
Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -864,7 +893,7 @@ def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -886,6 +915,112 @@ def update_cmek_settings( ) return self._stubs["update_cmek_settings"] + @property + def get_settings( + self, + ) -> Callable[[logging_config.GetSettingsRequest], logging_config.Settings]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["get_settings"] + + @property + def update_settings( + self, + ) -> Callable[[logging_config.UpdateSettingsRequest], logging_config.Settings]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["update_settings"] + + @property + def copy_log_entries( + self, + ) -> Callable[[logging_config.CopyLogEntriesRequest], operations_pb2.Operation]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["copy_log_entries"] + def close(self): self.grpc_channel.close() diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index b4228c690..86e67253c 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -25,6 +26,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -157,6 +159,7 @@ def __init__( self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -231,6 +234,22 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self._operations_client + @property def list_buckets( self, @@ -240,7 +259,7 @@ def list_buckets( ]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -268,7 +287,7 @@ def get_bucket( ]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -296,9 +315,9 @@ def create_bucket( ]: r"""Return a callable for the create bucket method over gRPC. - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -326,17 +345,18 @@ def update_bucket( ]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: + Updates a log bucket. This method replaces the following fields + in the existing bucket with values from the new bucket: ``retention_period`` If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + ``FAILED_PRECONDITION`` will be returned. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -362,9 +382,12 @@ def delete_bucket( ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -390,8 +413,9 @@ def undelete_bucket( ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -419,7 +443,7 @@ def list_views( ]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -445,7 +469,7 @@ def get_view( ) -> Callable[[logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket.. Returns: Callable[[~.GetViewRequest], @@ -473,8 +497,8 @@ def create_view( ]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. 
Returns: Callable[[~.CreateViewRequest], @@ -502,8 +526,11 @@ def update_view( ]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. Returns: Callable[[~.UpdateViewRequest], @@ -529,7 +556,10 @@ def delete_view( ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. Returns: Callable[[~.DeleteViewRequest], @@ -704,7 +734,8 @@ def list_exclusions( ]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -732,7 +763,7 @@ def get_exclusion( ]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -760,10 +791,9 @@ def create_exclusion( ]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -791,8 +821,8 @@ def update_exclusion( ]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. Returns: Callable[[~.UpdateExclusionRequest], @@ -818,7 +848,7 @@ def delete_exclusion( ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -846,13 +876,14 @@ def get_cmek_settings( ]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. 
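The hunks above and below add a cached ``operations_client`` and a ``copy_log_entries`` stub to both ConfigServiceV2 gRPC transports, so CopyLogEntries can run as a long-running operation. As a minimal sketch of how that surface is typically consumed once this change lands (not part of the patch; the project, bucket, and destination values are placeholder assumptions, and the blocking ``result()`` call is illustrative), a caller would go through the generated client rather than the transport:

.. code-block:: python

    from google.cloud import logging_v2

    def sample_copy_log_entries():
        # The client constructs the gRPC transport shown in these hunks.
        client = logging_v2.ConfigServiceV2Client()

        # Field names follow CopyLogEntriesRequest; the values are placeholders.
        request = logging_v2.CopyLogEntriesRequest(
            name="projects/my-project/locations/global/buckets/my-bucket",
            destination="storage.googleapis.com/my-gcs-bucket",
        )

        # copy_log_entries starts a long-running operation; the wrapper polls
        # it through the cached operations_client added in this change.
        operation = client.copy_log_entries(request=request)
        response = operation.result()
        print(response)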
@@ -883,11 +914,11 @@ def update_cmek_settings( ]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -895,7 +926,7 @@ def update_cmek_settings( ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -917,6 +948,118 @@ def update_cmek_settings( ) return self._stubs["update_cmek_settings"] + @property + def get_settings( + self, + ) -> Callable[ + [logging_config.GetSettingsRequest], Awaitable[logging_config.Settings] + ]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_settings" not in self._stubs: + self._stubs["get_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/GetSettings", + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["get_settings"] + + @property + def update_settings( + self, + ) -> Callable[ + [logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings] + ]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_settings" not in self._stubs: + self._stubs["update_settings"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/UpdateSettings", + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs["update_settings"] + + @property + def copy_log_entries( + self, + ) -> Callable[ + [logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "copy_log_entries" not in self._stubs: + self._stubs["copy_log_entries"] = self.grpc_channel.unary_unary( + "/google.logging.v2.ConfigServiceV2/CopyLogEntries", + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["copy_log_entries"] + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/__init__.py index bd7a79820..41b2a2d15 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index dc8b56b81..c89da25a5 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -223,11 +223,28 @@ async def delete_log( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -235,16 +252,15 @@ async def delete_log( log_name (:class:`str`): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -324,6 +340,29 @@ async def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. @@ -332,19 +371,17 @@ async def write_log_entries( to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -400,17 +437,17 @@ async def write_log_entries( Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. 
To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -490,6 +527,27 @@ async def list_log_entries( For ways to export log entries, see `Exporting Logs `__. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. @@ -497,18 +555,17 @@ async def list_log_entries( Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. @@ -620,6 +677,26 @@ async def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to @@ -686,18 +763,37 @@ async def list_logs( or billing accounts. Only logs that have entries are listed. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (:class:`str`): Required. The resource name that owns the logs: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -782,6 +878,37 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to `TailLogEntries`. diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index b33821be5..3eae59704 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -405,11 +405,28 @@ def delete_log( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -417,16 +434,15 @@ def delete_log( log_name (str): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -495,6 +511,29 @@ def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. @@ -503,19 +542,17 @@ def write_log_entries( to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -571,17 +608,17 @@ def write_log_entries( Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. 
+ limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -649,6 +686,27 @@ def list_log_entries( For ways to export log entries, see `Exporting Logs `__. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. @@ -656,18 +714,17 @@ def list_log_entries( Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. @@ -768,6 +825,26 @@ def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to @@ -826,18 +903,37 @@ def list_logs( or billing accounts. Only logs that have entries are listed. + + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): Required. The resource name that owns the logs: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -911,6 +1007,37 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. + + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + Args: requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): The request object iterator. The parameters to `TailLogEntries`. diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py index ca4d01fac..e1e7188cd 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 65e713121..4e0163fe6 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
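The inline samples added above build explicit request objects. The same docstrings also document flattened keyword arguments (``log_name``, ``entries``, ``parent``), so, as a small complementary sketch that is not part of this patch (the project and log IDs are placeholders, and a monitored resource would normally also be set when writing), the delete and write calls can be expressed directly:

.. code-block:: python

    from google.cloud import logging_v2

    def sample_flattened_arguments():
        client = logging_v2.LoggingServiceV2Client()

        # Flattened form of WriteLogEntriesRequest: pass the entries directly.
        entry = logging_v2.LogEntry()
        entry.log_name = "projects/my-project-id/logs/syslog"
        entry.text_payload = "example payload"
        client.write_log_entries(entries=[entry])

        # Flattened form of DeleteLogRequest: pass the log name directly.
        client.delete_log(log_name="projects/my-project-id/logs/syslog")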
diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 6fe2e9e8a..5f474f006 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 4f5c9b1ca..76b562d7e 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -233,11 +233,11 @@ def grpc_channel(self) -> grpc.Channel: def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. Returns: Callable[[~.DeleteLogRequest], diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 27b094831..1ef7198fd 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -237,11 +237,11 @@ def delete_log( ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. 
Returns: Callable[[~.DeleteLogRequest], diff --git a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index f37e39314..fc0615f19 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 311806df2..e3bf4c51a 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -216,6 +216,26 @@ async def list_log_metrics( ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. @@ -309,6 +329,25 @@ async def get_log_metric( ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. @@ -405,6 +444,30 @@ async def create_log_metric( ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. @@ -501,6 +564,30 @@ async def update_log_metric( ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. @@ -608,6 +695,22 @@ async def delete_log_metric( ) -> None: r"""Deletes a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client.delete_log_metric(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index ade883811..5ab25db20 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -410,6 +410,26 @@ def list_log_metrics( ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. @@ -492,6 +512,25 @@ def get_log_metric( ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. @@ -577,6 +616,30 @@ def create_log_metric( ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. + .. 
code-block:: python + + from google.cloud import logging_v2 + + def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. @@ -673,6 +736,30 @@ def update_log_metric( ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. @@ -769,6 +856,22 @@ def delete_log_metric( ) -> None: r"""Deletes a logs-based metric. + .. code-block:: python + + from google.cloud import logging_v2 + + def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client.delete_log_metric(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 7026e3858..2c647cda1 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 10ccb830c..e28f020df 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
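The MetricsServiceV2 samples added above follow the same request-object pattern. As another hedged sketch outside the patch (the project ID, metric ID, and filter are placeholder assumptions), the documented flattened arguments give an equivalent, shorter form:

.. code-block:: python

    from google.cloud import logging_v2

    def sample_metric_flattened_arguments():
        client = logging_v2.MetricsServiceV2Client()

        # Flattened form of CreateLogMetricRequest: pass parent and metric.
        metric = logging_v2.LogMetric()
        metric.name = "my-metric"          # placeholder metric ID
        metric.filter = "severity>=ERROR"  # placeholder filter expression
        created = client.create_log_metric(
            parent="projects/my-project-id",
            metric=metric,
        )
        print(created)

        # Flattened form of DeleteLogMetricRequest.
        client.delete_log_metric(
            metric_name="projects/my-project-id/metrics/my-metric",
        )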
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index fef40f239..b3d9bab57 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 7b72b756f..d0241fdd2 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 889d7072e..28ff48f5c 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index 7d1cdd99e..43b5674dd 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,6 +17,7 @@ LogEntry, LogEntryOperation, LogEntrySourceLocation, + LogSplit, ) from .logging import ( DeleteLogRequest, @@ -35,6 +36,9 @@ from .logging_config import ( BigQueryOptions, CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, CreateBucketRequest, CreateExclusionRequest, CreateSinkRequest, @@ -46,6 +50,7 @@ GetBucketRequest, GetCmekSettingsRequest, GetExclusionRequest, + GetSettingsRequest, GetSinkRequest, GetViewRequest, ListBucketsRequest, @@ -60,13 +65,16 @@ LogExclusion, LogSink, LogView, + Settings, UndeleteBucketRequest, UpdateBucketRequest, UpdateCmekSettingsRequest, UpdateExclusionRequest, + UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, LifecycleState, + OperationState, ) from .logging_metrics import ( CreateLogMetricRequest, @@ -82,6 +90,7 @@ "LogEntry", "LogEntryOperation", "LogEntrySourceLocation", + "LogSplit", "DeleteLogRequest", "ListLogEntriesRequest", "ListLogEntriesResponse", @@ -96,6 +105,9 @@ "WriteLogEntriesResponse", "BigQueryOptions", "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", "CreateBucketRequest", "CreateExclusionRequest", "CreateSinkRequest", @@ -107,6 +119,7 @@ "GetBucketRequest", "GetCmekSettingsRequest", "GetExclusionRequest", + "GetSettingsRequest", "GetSinkRequest", "GetViewRequest", "ListBucketsRequest", @@ -121,13 +134,16 @@ "LogExclusion", "LogSink", "LogView", + "Settings", "UndeleteBucketRequest", "UpdateBucketRequest", "UpdateCmekSettingsRequest", "UpdateExclusionRequest", + "UpdateSettingsRequest", "UpdateSinkRequest", "UpdateViewRequest", "LifecycleState", + "OperationState", "CreateLogMetricRequest", "DeleteLogMetricRequest", "GetLogMetricRequest", diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index 93e428622..1bc7a3ea4 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,7 +25,7 @@ __protobuf__ = proto.module( package="google.logging.v2", - manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation",}, + manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation", "LogSplit",}, ) @@ -59,6 +59,7 @@ class LogEntry(proto.Message): ``[LOG_ID]`` must be URL-encoded within ``log_name``. Example: ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can only include the following characters: upper and lower case alphanumeric characters, forward-slash, underscore, hyphen, @@ -66,7 +67,7 @@ class LogEntry(proto.Message): For backward compatibility, if ``log_name`` begins with a forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. + is ingested as usual, but the forward-slash is removed. Listing the log entry will not show the leading slash and filtering for a log name with a leading slash will never return any results. @@ -139,9 +140,22 @@ class LogEntry(proto.Message): Optional. Information about the HTTP request associated with this log entry, if applicable. labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): - Optional. 
A set of user-defined (key, value) - data that provides additional information about - the log entry. + Optional. A map of key, value pairs that provides additional + information about the log entry. The labels can be + user-defined or system-defined. + + User-defined labels are arbitrary key, value pairs that you + can use to classify logs. + + System-defined labels are defined by GCP services for + platform logs. They have two components - a service + namespace component and the attribute name. For example: + ``compute.googleapis.com/resource_name``. + + Cloud Logging truncates label keys that exceed 512 B and + label values that exceed 64 KB upon their associated log + entry being written. The truncation is indicated by an + ellipsis at the end of the character string. operation (google.cloud.logging_v2.types.LogEntryOperation): Optional. Information about an operation associated with the log entry, if applicable. @@ -171,6 +185,10 @@ class LogEntry(proto.Message): source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): Optional. Source code location information associated with the log entry, if any. + split (google.cloud.logging_v2.types.LogSplit): + Optional. Information indicating this + LogEntry is part of a sequence of multiple log + entries split from a single LogEntry. """ log_name = proto.Field(proto.STRING, number=12,) @@ -201,6 +219,7 @@ class LogEntry(proto.Message): source_location = proto.Field( proto.MESSAGE, number=23, message="LogEntrySourceLocation", ) + split = proto.Field(proto.MESSAGE, number=35, message="LogSplit",) class LogEntryOperation(proto.Message): @@ -258,4 +277,30 @@ class LogEntrySourceLocation(proto.Message): function = proto.Field(proto.STRING, number=3,) +class LogSplit(proto.Message): + r"""Additional information used to correlate multiple log + entries. Used when a single LogEntry would exceed the Google + Cloud Logging size limit and is split across multiple log + entries. + + Attributes: + uid (str): + A globally unique identifier for all log entries in a + sequence of split log entries. All log entries with the same + \|LogSplit.uid\| are assumed to be part of the same sequence + of split log entries. + index (int): + The index of this LogEntry in the sequence of split log + entries. Log entries are given \|index\| values 0, 1, ..., + n-1 for a sequence of n log entries. + total_splits (int): + The total number of log entries that the + original LogEntry was split into. + """ + + uid = proto.Field(proto.STRING, number=1,) + index = proto.Field(proto.INT32, number=2,) + total_splits = proto.Field(proto.INT32, number=3,) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index 8477c2a49..76d86e34f 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -47,16 +47,15 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. 
The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. """ @@ -73,19 +72,17 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or folder that @@ -128,17 +125,17 @@ class WriteLogEntriesRequest(proto.Message): Log entries with timestamps that are more than the `logs retention - period `__ in - the past or more than 24 hours in the future will not be + period `__ in the + past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ for - calls to ``entries.write``, you should try to include - several log entries in this list, rather than calling this - method for each individual log entry. + limit `__ for calls + to ``entries.write``, you should try to include several log + entries in this list, rather than calling this method for + each individual log entry. partial_success (bool): Optional. Whether valid entries should be written even if some other entries fail due to INVALID_ARGUMENT or @@ -197,18 +194,17 @@ class ListLogEntriesRequest(proto.Message): Required. 
Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. @@ -338,12 +334,10 @@ class ListLogsRequest(proto.Message): parent (str): Required. The resource name that owns the logs: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` page_size (int): Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of @@ -357,14 +351,18 @@ class ListLogsRequest(proto.Message): should be identical to those in the previous call. resource_names (Sequence[str]): Optional. The resource name that owns the logs: - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` To support legacy queries, it could also be: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` """ parent = proto.Field(proto.STRING, number=1,) @@ -404,18 +402,17 @@ class TailLogEntriesRequest(proto.Message): Required. 
Name of a parent resource from which to retrieve log entries: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): Optional. A filter that chooses which log entries to return. See `Advanced Logs diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index f064f26b7..3dab7a143 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,6 +23,7 @@ package="google.logging.v2", manifest={ "LifecycleState", + "OperationState", "LogBucket", "LogView", "LogSink", @@ -56,6 +57,12 @@ "GetCmekSettingsRequest", "UpdateCmekSettingsRequest", "CmekSettings", + "GetSettingsRequest", + "UpdateSettingsRequest", + "Settings", + "CopyLogEntriesRequest", + "CopyLogEntriesMetadata", + "CopyLogEntriesResponse", }, ) @@ -67,18 +74,42 @@ class LifecycleState(proto.Enum): DELETE_REQUESTED = 2 +class OperationState(proto.Enum): + r"""List of different operation states. + High level state of the operation. This is used to report the + job's current state to the user. Once a long running operation + is created, the current state of the operation can be queried + even before the operation is finished and the final result is + available. + """ + OPERATION_STATE_UNSPECIFIED = 0 + OPERATION_STATE_SCHEDULED = 1 + OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 + OPERATION_STATE_RUNNING = 3 + OPERATION_STATE_SUCCEEDED = 4 + OPERATION_STATE_FAILED = 5 + OPERATION_STATE_CANCELLED = 6 + + class LogBucket(proto.Message): - r"""Describes a repository of logs. + r"""Describes a repository in which log entries are stored. Attributes: name (str): - The resource name of the bucket. For example: - "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" + Output only. The resource name of the bucket. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + For a list of supported locations, see `Supported + Regions `__ - For the location of ``global`` it is unspecified where logs - are actually stored. Once a bucket has been created, the - location can not be changed. + For the location of ``global`` it is unspecified where log + entries are actually stored. 
+ + After a bucket has been created, the location cannot be + changed. description (str): Describes this bucket. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -96,12 +127,30 @@ class LogBucket(proto.Message): bucket creation time, the default time of 30 days will be used. locked (bool): - Whether the bucket has been locked. - The retention period on a locked bucket may not + Whether the bucket is locked. + The retention period on a locked bucket cannot be changed. Locked buckets may only be deleted if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. + restricted_fields (Sequence[str]): + Log entry field paths that are denied access in this bucket. + + The following fields and their children are eligible: + ``textPayload``, ``jsonPayload``, ``protoPayload``, + ``httpRequest``, ``labels``, ``sourceLocation``. + + Restricting a repeated field will restrict all values. + Adding a parent will block all child fields. (e.g. + ``foo.bar`` will block ``foo.bar.baz``) + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + The CMEK settings of the log bucket. If + present, new log entries written to this log + bucket are encrypted using the CMEK key provided + in this configuration. If a log bucket has CMEK + settings, the CMEK settings cannot be disabled + later by updating the log bucket. Changing the + KMS key is allowed. """ name = proto.Field(proto.STRING, number=1,) @@ -111,16 +160,20 @@ class LogBucket(proto.Message): retention_days = proto.Field(proto.INT32, number=11,) locked = proto.Field(proto.BOOL, number=9,) lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) + restricted_fields = proto.RepeatedField(proto.STRING, number=15,) + cmek_settings = proto.Field(proto.MESSAGE, number=19, message="CmekSettings",) class LogView(proto.Message): - r"""Describes a view over logs in a bucket. + r"""Describes a view over log entries in a bucket. Attributes: name (str): The resource name of the view. - For example - "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket/views/my-view`` description (str): Describes this view. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -131,11 +184,19 @@ class LogView(proto.Message): view. filter (str): Filter that restricts which log entries in a bucket are - visible in this view. Filters are restricted to be a logical - AND of ==/!= of any of the following: originating - project/folder/organization/billing account. resource type - log id Example: SOURCE("projects/myproject") AND - resource.type = "gce_instance" AND LOG_ID("stdout") + visible in this view. + + Filters are restricted to be a logical AND of ==/!= of any + of the following: + + - originating project/folder/organization/billing account. + - resource type + - log id + + For example: + + SOURCE("projects/myproject") AND resource.type = + "gce_instance" AND LOG_ID("stdout") """ name = proto.Field(proto.STRING, number=1,) @@ -148,10 +209,10 @@ class LogView(proto.Message): class LogSink(proto.Message): r"""Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a - BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. The sink must be - created within a project, organization, billing account, or - folder. 
+ BigQuery dataset, a Pub/Sub topic or a Cloud Logging log bucket. + A logs filter controls which log entries are exported. The sink + must be created within a project, organization, billing account, + or folder. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -159,7 +220,9 @@ class LogSink(proto.Message): Attributes: name (str): Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + the project. + + For example: ``"my-syslog-errors-to-pubsub"``. Sink identifiers are limited to 100 characters and can include only the following characters: upper and lower-case alphanumeric characters, underscores, hyphens, and periods. @@ -182,30 +245,30 @@ class LogSink(proto.Message): Optional. An `advanced logs filter `__. The only exported log entries are those that are in the - resource owning the sink and that match the filter. For - example: + resource owning the sink and that match the filter. - :: + For example: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` description (str): Optional. A description of this sink. The maximum length of the description is 8000 characters. disabled (bool): - Optional. If set to True, then this sink is + Optional. If set to true, then this sink is disabled and it does not export any log entries. exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): - Optional. Log entries that match any of the exclusion - filters will not be exported. If a log entry is matched by - both ``filter`` and one of ``exclusion_filters`` it will not - be exported. + Optional. Log entries that match any of these exclusion + filters will not be exported. + + If a log entry is matched by both ``filter`` and one of + ``exclusion_filters`` it will not be exported. output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): Deprecated. This field is unused. writer_identity (str): Output only. An IAM identity—a service account or - group—under which Logging writes the exported log entries to - the sink's destination. This field is set by + group—under which Cloud Logging writes the exported log + entries to the sink's destination. This field is set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] @@ -218,25 +281,30 @@ class LogSink(proto.Message): Resource `__. Consult the destination service's documentation to determine the appropriate IAM roles to assign to the identity. + + Sinks that have a destination that is a log bucket in the + same project as the sink do not have a writer_identity and + no additional permissions are required. include_children (bool): Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, only the logs owned by the sink's parent resource - are available for export. If the field is true, then logs - from all the projects, folders, and billing accounts + are available for export. If the field is true, then log + entries from all the projects, folders, and billing accounts contained in the sink's parent resource are also available for export. Whether a particular log entry from the children - is exported depends on the sink's filter expression. For - example, if this field is true, then the filter + is exported depends on the sink's filter expression. 
+ + For example, if this field is true, then the filter ``resource.type=gce_instance`` would export all Compute Engine VM instance log entries from all projects in the - sink's parent. To only export entries from certain child - projects, filter on the project part of the log name: + sink's parent. - :: + To only export entries from certain child projects, filter + on the project part of the log name: - logName:("projects/test-project1/" OR "projects/test-project2/") AND - resource.type=gce_instance + logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. @@ -286,18 +354,20 @@ class BigQueryOptions(proto.Message): use_partitioned_tables (bool): Optional. Whether to use `BigQuery's partition tables `__. - By default, Logging creates dated tables based on the log - entries' timestamps, e.g. syslog_20170523. With partitioned - tables the date suffix is no longer present and `special - query + By default, Cloud Logging creates dated tables based on the + log entries' timestamps, e.g. syslog_20170523. With + partitioned tables the date suffix is no longer present and + `special query syntax `__ has to be used instead. In both cases, tables are sharded based on UTC timezone. uses_timestamp_column_partitioning (bool): Output only. True if new timestamp column based partitioning is in use, false if legacy ingestion-time partitioning is in - use. All new sinks will have this field set true and will - use timestamp column based partitioning. If + use. + + All new sinks will have this field set true and will use + timestamp column based partitioning. If use_partitioned_tables is false, this value has no meaning and will be false. Legacy sinks using partitioned tables will have this field set to false. @@ -369,13 +439,15 @@ class CreateBucketRequest(proto.Message): Attributes: parent (str): - Required. The resource in which to create the bucket: + Required. The resource in which to create the log bucket: :: "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - Example: ``"projects/my-logging-project/locations/global"`` + For example: + + ``"projects/my-project/locations/global"`` bucket_id (str): Required. A client-assigned identifier such as ``"my-bucket"``. Identifiers are limited to 100 characters @@ -407,11 +479,9 @@ class UpdateBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - Also requires permission - "resourcemanager.projects.updateLiens" to set the locked - property + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` bucket (google.cloud.logging_v2.types.LogBucket): Required. The updated bucket. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -420,10 +490,10 @@ class UpdateBucketRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - For a detailed ``FieldMask`` definition, see + For a detailed ``FieldMask`` definition, see: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=retention_days``. 
+ For example: ``updateMask=retention_days`` """ name = proto.Field(proto.STRING, number=1,) @@ -447,8 +517,9 @@ class GetBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name = proto.Field(proto.STRING, number=1,) @@ -468,8 +539,9 @@ class DeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name = proto.Field(proto.STRING, number=1,) @@ -489,8 +561,9 @@ class UndeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name = proto.Field(proto.STRING, number=1,) @@ -514,7 +587,9 @@ class ListViewsRequest(proto.Message): should be identical to those in the previous call. page_size (int): Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of + request. + + Non-positive values are ignored. The presence of ``nextPageToken`` in the response indicates that more results might be available. """ @@ -554,10 +629,11 @@ class CreateViewRequest(proto.Message): :: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + `"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"` - Example: - ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` view_id (str): Required. The id to use for this view. view (google.cloud.logging_v2.types.LogView): @@ -580,8 +656,9 @@ class UpdateViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` view (google.cloud.logging_v2.types.LogView): Required. The updated view. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -593,7 +670,7 @@ class UpdateViewRequest(proto.Message): For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ name = proto.Field(proto.STRING, number=1,) @@ -614,8 +691,9 @@ class GetViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. 
+ For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ name = proto.Field(proto.STRING, number=1,) @@ -632,8 +710,11 @@ class DeleteViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + :: + + `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ name = proto.Field(proto.STRING, number=1,) @@ -705,7 +786,9 @@ class GetSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name = proto.Field(proto.STRING, number=1,) @@ -725,8 +808,9 @@ class CreateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` sink (google.cloud.logging_v2.types.LogSink): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. @@ -735,9 +819,10 @@ class CreateSinkRequest(proto.Message): ``writer_identity`` in the new sink. If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as ``writer_identity`` is - the same group or service account used by Logging before the - addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. + the same group or service account used by Cloud Logging + before the addition of writer identities to this API. The + sink's destination must be in the same project as the sink + itself. If this field is set to true, or if the sink is owned by a non-project resource such as an organization, then the value @@ -767,7 +852,9 @@ class UpdateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` sink (google.cloud.logging_v2.types.LogSink): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. @@ -793,16 +880,18 @@ class UpdateSinkRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the + An empty ``updateMask`` is temporarily treated as using the following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed and + specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ sink_name = proto.Field(proto.STRING, number=1,) @@ -828,20 +917,21 @@ class DeleteSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name = proto.Field(proto.STRING, number=1,) class LogExclusion(proto.Message): - r"""Specifies a set of log entries that are not to be stored in - Logging. If your GCP resource receives a large volume of logs, - you can use exclusions to reduce your chargeable logs. - Exclusions are processed after log sinks, so you can export log - entries before they are excluded. Note that organization-level - and folder-level exclusions don't apply to child resources, and - that you can't exclude audit log entries. + r"""Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of log entries, + you can use exclusions to reduce your chargeable logs. Note that + exclusions on organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify the \_Required + sink or exclude logs from it. Attributes: name (str): @@ -859,10 +949,11 @@ class LogExclusion(proto.Message): `sample function `__, you can exclude less than 100% of the matching log entries. + For example, the following query matches 99% of low-severity log entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity`__ for more information. @@ -1069,11 +1165,14 @@ class GetCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google + Cloud projects, folders, organizations and billing accounts. + Once configured for an organization, it applies to all + projects and folders in the Google Cloud organization. """ name = proto.Field(proto.STRING, number=1,) @@ -1083,7 +1182,7 @@ class UpdateCmekSettingsRequest(proto.Message): r"""The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1098,15 +1197,18 @@ class UpdateCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. cmek_settings (google.cloud.logging_v2.types.CmekSettings): Required. The CMEK settings to update. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -1118,7 +1220,7 @@ class UpdateCmekSettingsRequest(proto.Message): See [FieldMask][google.protobuf.FieldMask] for more information. 
- Example: ``"updateMask=kmsKeyName"`` + For example: ``"updateMask=kmsKeyName"`` """ name = proto.Field(proto.STRING, number=1,) @@ -1133,11 +1235,11 @@ class CmekSettings(proto.Message): associated with a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be configured for - GCP organizations. Once configured, it applies to all projects and - folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured for + Google Cloud organizations. Once configured, it applies to all + projects and folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1149,14 +1251,163 @@ class CmekSettings(proto.Message): The resource name for the configured Cloud KMS key. KMS key name format: - "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" For example: - ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` - To enable CMEK for the Logs Router, set this field to a - valid ``kms_key_name`` for which the associated service - account has the required + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required cloudkms.cryptoKeyEncrypterDecrypter roles + assigned for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name or + disabled by setting the key name to an empty string. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Log Router, set this field to an + empty string. + + See `Enabling CMEK for Log + Router `__ + for more information. + service_account_id (str): + Output only. The service account that will be used by the + Log Router to access your Cloud KMS key. + + Before enabling CMEK for Log Router, you must first assign + the cloudkms.cryptoKeyEncrypterDecrypter role to the service + account that the Log Router will use to access your Cloud + KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Log + Router `__ + for more information. + """ + + name = proto.Field(proto.STRING, number=1,) + kms_key_name = proto.Field(proto.STRING, number=2,) + service_account_id = proto.Field(proto.STRING, number=3,) + + +class GetSettingsRequest(proto.Message): + r"""The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing accounts. + Currently it can only be configured for organizations. 
Once + configured for an organization, it applies to all projects + and folders in the Google Cloud organization. + """ + + name = proto.Field(proto.STRING, number=1,) + + +class UpdateSettingsRequest(proto.Message): + r"""The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the settings to update. + + :: + + "organizations/[ORGANIZATION_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be overwritten + if and only if it is in the update mask. Output only fields + cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + """ + + name = proto.Field(proto.STRING, number=1,) + settings = proto.Field(proto.MESSAGE, number=2, message="Settings",) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) + + +class Settings(proto.Message): + r"""Describes the settings associated with a project, folder, + organization, billing account, or flexible resource. + + Attributes: + name (str): + Output only. The resource name of the + settings. + kms_key_name (str): + Optional. The resource name for the configured Cloud KMS + key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key. @@ -1168,31 +1419,118 @@ class CmekSettings(proto.Message): the time of encryption unless access to that key has been revoked. - To disable CMEK for the Logs Router, set this field to an + To disable CMEK for the Log Router, set this field to an empty string. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. - service_account_id (str): + kms_service_account_id (str): Output only. The service account that will be used by the - Logs Router to access your Cloud KMS key. + Log Router to access your Cloud KMS key. - Before enabling CMEK for Logs Router, you must first assign + Before enabling CMEK for Log Router, you must first assign the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to - the service account that the Logs Router will use to access + the service account that the Log Router will use to access your Cloud KMS key. Use - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings] to obtain the service account ID. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + storage_location (str): + Optional. 
The Cloud region that will be used for \_Default + and \_Required log buckets for newly created projects and + folders. For example ``europe-west1``. This setting does not + affect the location of custom log buckets. + disable_default_sink (bool): + Optional. If set to true, the \_Default sink in newly + created projects and folders will created in a disabled + state. This can be used to automatically disable log + ingestion if there is already an aggregated sink configured + in the hierarchy. The \_Default sink can be re-enabled + manually if needed. """ name = proto.Field(proto.STRING, number=1,) kms_key_name = proto.Field(proto.STRING, number=2,) - service_account_id = proto.Field(proto.STRING, number=3,) + kms_service_account_id = proto.Field(proto.STRING, number=3,) + storage_location = proto.Field(proto.STRING, number=4,) + disable_default_sink = proto.Field(proto.BOOL, number=5,) + + +class CopyLogEntriesRequest(proto.Message): + r"""The parameters to CopyLogEntries. + + Attributes: + name (str): + Required. Log bucket from which to copy log entries. + + For example: + + ``"projects/my-project/locations/global/buckets/my-source-bucket"`` + filter (str): + Optional. A filter specifying which log + entries to copy. The filter must be no more than + 20k characters. An empty filter matches all log + entries. + destination (str): + Required. Destination to which to copy log + entries. + """ + + name = proto.Field(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=3,) + destination = proto.Field(proto.STRING, number=4,) + + +class CopyLogEntriesMetadata(proto.Message): + r"""Metadata for CopyLogEntries long running operations. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + cancellation_requested (bool): + Identifies whether the user has requested + cancellation of the operation. + request (google.cloud.logging_v2.types.CopyLogEntriesRequest): + CopyLogEntries RPC request. + progress (int): + Estimated progress of the operation (0 - + 100%). + writer_identity (str): + The IAM identity of a service account that must be granted + access to the destination. + + If the service account is not granted permission to the + destination within an hour, the operation will be cancelled. + + For example: ``"serviceAccount:foo@bar.com"`` + """ + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=3, enum="OperationState",) + cancellation_requested = proto.Field(proto.BOOL, number=4,) + request = proto.Field(proto.MESSAGE, number=5, message="CopyLogEntriesRequest",) + progress = proto.Field(proto.INT32, number=6,) + writer_identity = proto.Field(proto.STRING, number=7,) + + +class CopyLogEntriesResponse(proto.Message): + r"""Response type for CopyLogEntries long running operations. + + Attributes: + log_entries_copied_count (int): + Number of log entries copied. 
+ """ + + log_entries_copied_count = proto.Field(proto.INT64, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index 26d855680..af1f2f548 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -55,12 +55,12 @@ class LogMetric(proto.Message): forward-slash character (``/``) denotes a hierarchy of name pieces, and it cannot be the first character of the name. - The metric identifier in this field must not be - `URL-encoded `__. - However, when the metric identifier appears as the - ``[METRIC_ID]`` part of a ``metric_name`` API parameter, - then the metric identifier must be URL-encoded. Example: - ``"projects/my-project/metrics/nginx%2Frequests"``. + This field is the ``[METRIC_ID]`` part of a metric resource + name in the format + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". Example: If the + resource name of a metric is + ``"projects/my-project/metrics/nginx%2Frequests"``, this + field's value is ``"nginx/requests"``. description (str): Optional. A description of this metric, which is used in documentation. The maximum length of @@ -75,6 +75,9 @@ class LogMetric(proto.Message): "resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters. + disabled (bool): + Optional. If set to True, then this metric is + disabled and it does not generate any points. metric_descriptor (google.api.metric_pb2.MetricDescriptor): Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric @@ -170,6 +173,7 @@ class ApiVersion(proto.Enum): name = proto.Field(proto.STRING, number=1,) description = proto.Field(proto.STRING, number=2,) filter = proto.Field(proto.STRING, number=3,) + disabled = proto.Field(proto.BOOL, number=12,) metric_descriptor = proto.Field( proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py new file mode 100644 index 000000000..abe149bd6 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py new file mode 100644 index 000000000..90eb5354e --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] +from google.cloud import logging_v2 + + +def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py new file mode 100644 index 000000000..1c5c329c8 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_async] +from google.cloud import logging_v2 + + +async def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = await client.create_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py new file mode 100644 index 000000000..9b3093229 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_sync] +from google.cloud import logging_v2 + + +def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py new file mode 100644 index 000000000..5be1a9ad3 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_async] +from google.cloud import logging_v2 + + +async def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.create_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py new file mode 100644 index 000000000..3b57560f3 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] +from google.cloud import logging_v2 + + +def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py new file mode 100644 index 000000000..789598d4c --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateSink_async] +from google.cloud import logging_v2 + + +async def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = await client.create_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateSink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py new file mode 100644 index 000000000..e22bc6055 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateSink_sync] +from google.cloud import logging_v2 + + +def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateSink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py new file mode 100644 index 000000000..499d4eeba --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateView_async] +from google.cloud import logging_v2 + + +async def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = await client.create_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateView_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py new file mode 100644 index 000000000..8e6425d71 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateView_sync] +from google.cloud import logging_v2 + + +def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateView_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py new file mode 100644 index 000000000..def3e5abc --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_async] +from google.cloud import logging_v2 + + +async def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.delete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py new file mode 100644 index 000000000..64c95c992 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] +from google.cloud import logging_v2 + + +def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py new file mode 100644 index 000000000..4c042c3be --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] +from google.cloud import logging_v2 + + +async def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + await client.delete_exclusion(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py new file mode 100644 index 000000000..dc3136584 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] +from google.cloud import logging_v2 + + +def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py new file mode 100644 index 000000000..fe5acb523 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_async] +from google.cloud import logging_v2 + + +async def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + await client.delete_sink(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py new file mode 100644 index 000000000..d9ddc66a0 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync] +from google.cloud import logging_v2 + + +def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client.delete_sink(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py new file mode 100644 index 000000000..fd1eee969 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteView_async] +from google.cloud import logging_v2 + + +async def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + await client.delete_view(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteView_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py new file mode 100644 index 000000000..1169b4000 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteView_sync] +from google.cloud import logging_v2 + + +def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client.delete_view(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteView_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py new file mode 100644 index 000000000..4b964aa74 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetBucket_async] +from google.cloud import logging_v2 + + +async def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetBucket_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py new file mode 100644 index 000000000..1b299dd56 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetBucket_sync] +from google.cloud import logging_v2 + + +def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetBucket_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py new file mode 100644 index 000000000..356f0db9f --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] +from google.cloud import logging_v2 + + +async def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py new file mode 100644 index 000000000..83dfc8d2c --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] +from google.cloud import logging_v2 + + +def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py new file mode 100644 index 000000000..27a764445 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_async] +from google.cloud import logging_v2 + + +async def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py new file mode 100644 index 000000000..980914dac --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_sync] +from google.cloud import logging_v2 + + +def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py new file mode 100644 index 000000000..0da6e2a7e --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_async] +from google.cloud import logging_v2 + + +async def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py new file mode 100644 index 000000000..ccbc05d50 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_sync] +from google.cloud import logging_v2 + + +def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py new file mode 100644 index 000000000..fa3d7cf7f --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSink_async] +from google.cloud import logging_v2 + + +async def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = await client.get_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py new file mode 100644 index 000000000..48581e470 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSink_sync] +from google.cloud import logging_v2 + + +def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py new file mode 100644 index 000000000..9f26a5417 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetView_async] +from google.cloud import logging_v2 + + +async def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = await client.get_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetView_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py new file mode 100644 index 000000000..f88c15d2e --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetView_sync] +from google.cloud import logging_v2 + + +def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client.get_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetView_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py new file mode 100644 index 000000000..4e3bfea55 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListBuckets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListBuckets_async] +from google.cloud import logging_v2 + + +async def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListBuckets_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py new file mode 100644 index 000000000..3522c4c89 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuckets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListBuckets_sync] +from google.cloud import logging_v2 + + +def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListBuckets_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py new file mode 100644 index 000000000..788436d6a --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExclusions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListExclusions_async] +from google.cloud import logging_v2 + + +async def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListExclusions_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py new file mode 100644 index 000000000..8ea9407a1 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExclusions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListExclusions_sync] +from google.cloud import logging_v2 + + +def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListExclusions_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py new file mode 100644 index 000000000..b43b5682a --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListSinks_async] +from google.cloud import logging_v2 + + +async def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sinks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListSinks_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py new file mode 100644 index 000000000..235395e6d --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+#
+# Snippet for ListSinks
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListSinks_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_sinks():
+    # Create a client
+    client = logging_v2.ConfigServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListSinksRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_sinks(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListSinks_sync]
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py
new file mode 100644
index 000000000..27910c9f7
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListViews
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_ConfigServiceV2_ListViews_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_views():
+    # Create a client
+    client = logging_v2.ConfigServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListViewsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client coroutine must be awaited to obtain the pager)
+    page_result = await client.list_views(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_ConfigServiceV2_ListViews_async]
diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
new file mode 100644
index 000000000..2e5b6e53b
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListViews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListViews_sync] +from google.cloud import logging_v2 + + +def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListViews_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py new file mode 100644 index 000000000..020866b75 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] +from google.cloud import logging_v2 + + +async def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.undelete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py new file mode 100644 index 000000000..0dfb39a11 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] +from google.cloud import logging_v2 + + +def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py new file mode 100644 index 000000000..78245abfc --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_async] +from google.cloud import logging_v2 + + +async def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.update_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py new file mode 100644 index 000000000..c285fd542 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] +from google.cloud import logging_v2 + + +def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py new file mode 100644 index 000000000..8d49b85e7 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] +from google.cloud import logging_v2 + + +async def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py new file mode 100644 index 000000000..7b04208d4 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] +from google.cloud import logging_v2 + + +def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py new file mode 100644 index 000000000..d06cf80d4 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] +from google.cloud import logging_v2 + + +async def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.update_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py new file mode 100644 index 000000000..c0dba34cc --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] +from google.cloud import logging_v2 + + +def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py new file mode 100644 index 000000000..dba1d4e8e --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_async] +from google.cloud import logging_v2 + + +async def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py new file mode 100644 index 000000000..f70f52036 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] +from google.cloud import logging_v2 + + +def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py new file mode 100644 index 000000000..c46b9ab42 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_async] +from google.cloud import logging_v2 + + +async def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = await client.update_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py new file mode 100644 index 000000000..9639ece28 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_sync] +from google.cloud import logging_v2 + + +def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_sync] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py new file mode 100644 index 000000000..250d3f9dc --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateView_async] +from google.cloud import logging_v2 + + +async def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = await client.update_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateView_async] diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py new file mode 100644 index 000000000..139784880 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateView_sync] +from google.cloud import logging_v2 + + +def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateView_sync] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py new file mode 100644 index 000000000..6338b9abc --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_async] +from google.cloud import logging_v2 + + +async def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + await client.delete_log(request=request) + + +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_async] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py new file mode 100644 index 000000000..36280057b --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_sync] +from google.cloud import logging_v2 + + +def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) + + +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_sync] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py new file mode 100644 index 000000000..4a8692b04 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_log_entries():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogEntriesRequest(
+        resource_names=['resource_names_value_1', 'resource_names_value_2'],
+    )
+
+    # Make the request (the async client coroutine must be awaited to obtain the pager)
+    page_result = await client.list_log_entries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_async]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
new file mode 100644
index 000000000..062075af9
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogEntries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_log_entries():
+    # Create a client
+    client = logging_v2.LoggingServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogEntriesRequest(
+        resource_names=['resource_names_value_1', 'resource_names_value_2'],
+    )
+
+    # Make the request
+    page_result = client.list_log_entries(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_sync]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
new file mode 100644
index 000000000..fb0106199
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogs_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_logs():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client coroutine must be awaited to obtain the pager)
+    page_result = await client.list_logs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogs_async]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
new file mode 100644
index 000000000..0f775572f
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListLogs_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_logs():
+    # Create a client
+    client = logging_v2.LoggingServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_logs(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListLogs_sync]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py
new file mode 100644
index 000000000..b8f339701
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListMonitoredResourceDescriptors
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_monitored_resource_descriptors():
+    # Create a client
+    client = logging_v2.LoggingServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListMonitoredResourceDescriptorsRequest(
+    )
+
+    # Make the request (the async client coroutine must be awaited to obtain the pager)
+    page_result = await client.list_monitored_resource_descriptors(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async]
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
new file mode 100644
index 000000000..736d64d61
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMonitoredResourceDescriptors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] +from google.cloud import logging_v2 + + +def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py new file mode 100644 index 000000000..3e77920f8 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.tail_log_entries(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_async] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py new file mode 100644 index 000000000..ee1108b33 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] +from google.cloud import logging_v2 + + +def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py new file mode 100644 index 000000000..28025d777 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = await client.write_log_entries(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py new file mode 100644 index 000000000..31569811c --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] +from google.cloud import logging_v2 + + +def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py new file mode 100644 index 000000000..96690c2c3 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = await client.create_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py new file mode 100644 index 000000000..051694d31 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py new file mode 100644 index 000000000..bf2ee5e4a --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + await client.delete_log_metric(request=request) + + +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py new file mode 100644 index 000000000..eae109200 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client.delete_log_metric(request=request) + + +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py new file mode 100644 index 000000000..cea94a356 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = await client.get_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_async] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py new file mode 100644 index 000000000..eea36222a --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py new file mode 100644 index 000000000..9dac77937 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_async]
+from google.cloud import logging_v2
+
+
+async def sample_list_log_metrics():
+    # Create a client
+    client = logging_v2.MetricsServiceV2AsyncClient()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogMetricsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (await the call to obtain the async pager)
+    page_result = await client.list_log_metrics(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_async]
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py
new file mode 100644
index 000000000..97b3c2f13
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListLogMetrics
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-logging
+
+
+# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync]
+from google.cloud import logging_v2
+
+
+def sample_list_log_metrics():
+    # Create a client
+    client = logging_v2.MetricsServiceV2Client()
+
+    # Initialize request argument(s)
+    request = logging_v2.ListLogMetricsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_log_metrics(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync]
diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py
new file mode 100644
index 000000000..c94c70e76
--- /dev/null
+++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = await client.update_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py new file mode 100644 index 000000000..bcdff3269 --- /dev/null +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] +from google.cloud import logging_v2 + + +def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] diff --git a/samples/generated_samples/snippet_metadata_logging_v2.json b/samples/generated_samples/snippet_metadata_logging_v2.json new file mode 100644 index 000000000..b6ad799b1 --- /dev/null +++ b/samples/generated_samples/snippet_metadata_logging_v2.json @@ -0,0 +1,3269 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CopyLogEntries" + } + }, + "file": "logging_v2_generated_config_service_v2_copy_log_entries_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CopyLogEntries" + } + }, + "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + } + }, + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + 
"start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + } + }, + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + } + }, + "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + } 
+ }, + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSettings" + } + }, + "file": 
"logging_v2_generated_config_service_v2_get_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSink" + } + }, + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSink" + } + }, + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetView" + } + }, + "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetView" + } + }, + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", + 
"segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + } + }, + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + } + }, + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListExclusions" + } + }, + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListExclusions" + } + }, + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListSinks" + } + }, + "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + 
}, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListSinks" + } + }, + "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListViews" + } + }, + "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListViews" + } + }, + "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" 
+ }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + } + }, + "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateCmekSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_update_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateExclusion" + } + }, + "file": "logging_v2_generated_config_service_v2_update_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + } + }, + "file": "logging_v2_generated_config_service_v2_update_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_update_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSink" + } + }, + "file": "logging_v2_generated_config_service_v2_update_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateView" + } + }, + "file": "logging_v2_generated_config_service_v2_update_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateView" + } + }, + "file": "logging_v2_generated_config_service_v2_update_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + } + }, + "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + } + }, + "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { 
+ "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 41, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + } + }, + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 41, + "type": 
"RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + } + }, + "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "CreateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": 
"RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "CreateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "DeleteLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "DeleteLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "GetLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "GetLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + 
"service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "ListLogMetrics" + } + }, + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "ListLogMetrics" + } + }, + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "UpdateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "UpdateLogMetric" + } + }, + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/tests/__init__.py b/tests/__init__.py index 4de65971c..e8e1c3845 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 4de65971c..e8e1c3845 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 4de65971c..e8e1c3845 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/logging_v2/__init__.py b/tests/unit/gapic/logging_v2/__init__.py index 4de65971c..e8e1c3845 100644 --- a/tests/unit/gapic/logging_v2/__init__.py +++ b/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index e7d2ea7d1..401394deb 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,9 +25,13 @@ from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -38,6 +42,7 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1018,6 +1023,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) response = client.get_bucket(request) @@ -1033,6 +1039,7 @@ def test_get_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] def test_get_bucket_empty_call(): @@ -1072,6 +1079,7 @@ async def test_get_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) ) response = await client.get_bucket(request) @@ -1088,6 +1096,7 @@ async def test_get_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -1167,6 +1176,7 @@ def test_create_bucket(request_type, transport: 
str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) response = client.create_bucket(request) @@ -1182,6 +1192,7 @@ def test_create_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] def test_create_bucket_empty_call(): @@ -1221,6 +1232,7 @@ async def test_create_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) ) response = await client.create_bucket(request) @@ -1237,6 +1249,7 @@ async def test_create_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -1316,6 +1329,7 @@ def test_update_bucket(request_type, transport: str = "grpc"): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) response = client.update_bucket(request) @@ -1331,6 +1345,7 @@ def test_update_bucket(request_type, transport: str = "grpc"): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] def test_update_bucket_empty_call(): @@ -1370,6 +1385,7 @@ async def test_update_bucket_async( retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + restricted_fields=["restricted_fields_value"], ) ) response = await client.update_bucket(request) @@ -1386,6 +1402,7 @@ async def test_update_bucket_async( assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.restricted_fields == ["restricted_fields_value"] @pytest.mark.asyncio @@ -5433,6 +5450,538 @@ async def test_update_cmek_settings_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] +@pytest.mark.parametrize("request_type", [logging_config.GetSettingsRequest, dict,]) +def test_get_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + response = client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +def test_get_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) + + +def test_get_settings_field_headers(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_settings_flattened(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_settings_flattened_error(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_settings( + logging_config.GetSettingsRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_settings(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_settings( + logging_config.GetSettingsRequest(), name="name_value", + ) + + +@pytest.mark.parametrize("request_type", [logging_config.UpdateSettingsRequest, dict,]) +def test_update_settings(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + response = client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +def test_update_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_settings_async( + transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings( + name="name_value", + kms_key_name="kms_key_name_value", + kms_service_account_id="kms_service_account_id_value", + storage_location="storage_location_value", + disable_default_sink=True, + ) + ) + response = await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.Settings) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.kms_service_account_id == "kms_service_account_id_value" + assert response.storage_location == "storage_location_value" + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) + + +def test_update_settings_field_headers(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = logging_config.Settings() + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_settings_flattened(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_settings( + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_settings_flattened_error(): + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_settings), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + logging_config.Settings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_settings( + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize("request_type", [logging_config.CopyLogEntriesRequest, dict,]) +def test_copy_log_entries(request_type, transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_copy_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + client.copy_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + +@pytest.mark.asyncio +async def test_copy_log_entries_async( + transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest +): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.copy_log_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( @@ -5570,6 +6119,9 @@ def test_config_service_v2_base_transport(): "delete_exclusion", "get_cmek_settings", "update_cmek_settings", + "get_settings", + "update_settings", + "copy_log_entries", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5578,6 +6130,11 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -5886,6 +6443,32 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel +def test_config_service_v2_grpc_lro_client(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_config_service_v2_grpc_lro_async_client(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + def test_cmek_settings_path(): project = "squid" expected = "projects/{project}/cmekSettings".format(project=project,) @@ -5996,8 +6579,26 @@ def test_parse_log_view_path(): assert expected == actual +def test_settings_path(): + project = "squid" + expected = "projects/{project}/settings".format(project=project,) + actual = ConfigServiceV2Client.settings_path(project) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.settings_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_settings_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "whelk" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6007,7 +6608,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "octopus", } path = ConfigServiceV2Client.common_billing_account_path(**expected) @@ -6017,7 +6618,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "oyster" expected = "folders/{folder}".format(folder=folder,) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6025,7 +6626,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "nudibranch", } path = ConfigServiceV2Client.common_folder_path(**expected) @@ -6035,7 +6636,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "cuttlefish" expected = "organizations/{organization}".format(organization=organization,) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6043,7 +6644,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "mussel", } path = ConfigServiceV2Client.common_organization_path(**expected) @@ -6053,7 +6654,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "winkle" expected = "projects/{project}".format(project=project,) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6061,7 +6662,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nautilus", } path = ConfigServiceV2Client.common_project_path(**expected) @@ -6071,8 +6672,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "scallop" + location = "abalone" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -6082,8 +6683,8 @@ 
def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "squid", + "location": "clam", } path = ConfigServiceV2Client.common_location_path(**expected) diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 0b3b202eb..e87e1c26d 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 764a76121..e6883889d 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1024,6 +1024,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1039,6 +1040,7 @@ def test_get_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1078,6 +1080,7 @@ async def test_get_log_metric_async( name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1094,6 +1097,7 @@ async def test_get_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1249,6 +1253,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1264,6 +1269,7 @@ def test_create_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1307,6 +1313,7 @@ async def test_create_log_metric_async( name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1323,6 +1330,7 @@ async def test_create_log_metric_async( assert response.name == "name_value" assert response.description == 
"description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1500,6 +1508,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1515,6 +1524,7 @@ def test_update_log_metric(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1558,6 +1568,7 @@ async def test_update_log_metric_async( name="name_value", description="description_value", filter="filter_value", + disabled=True, value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1574,6 +1585,7 @@ async def test_update_log_metric_async( assert response.name == "name_value" assert response.description == "description_value" assert response.filter == "filter_value" + assert response.disabled is True assert response.value_extractor == "value_extractor_value" assert response.version == logging_metrics.LogMetric.ApiVersion.V1 From 4a46d0e12b238e5332cae96279a3c260b74a10cf Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Mar 2022 12:28:34 +0100 Subject: [PATCH 08/36] chore(deps): update all dependencies (#492) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-bigquery](https://togithub.com/googleapis/python-bigquery) | `==2.32.0` -> `==2.34.2` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/compatibility-slim/2.32.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-bigquery/2.34.2/confidence-slim/2.32.0)](https://docs.renovatebot.com/merge-confidence/) | | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.9.0` -> `==2.10.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/compatibility-slim/2.9.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.10.0/confidence-slim/2.9.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-bigquery

### [`v2.34.2`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#2342-httpsgithubcomgoogleapispython-bigquerycomparev2341v2342-2022-03-05)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.34.1...v2.34.2)

### [`v2.34.1`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#2341-httpsgithubcomgoogleapispython-bigquerycomparev2340v2341-2022-03-02)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.34.0...v2.34.1)

### [`v2.34.0`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#2340-httpsgithubcomgoogleapispython-bigquerycomparev2330v2340-2022-02-18)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.33.0...v2.34.0)

##### Features

- support BI Engine statistics in query job ([#1144](https://togithub.com/googleapis/python-bigquery/issues/1144)) ([7482549](https://togithub.com/googleapis/python-bigquery/commit/7482549cb42ed5302634ab4fb7b4efcd97b35c68))

### [`v2.33.0`](https://togithub.com/googleapis/python-bigquery/blob/HEAD/CHANGELOG.md#2330-httpsgithubcomgoogleapispython-bigquerycomparev2320v2330-2022-02-16)

[Compare Source](https://togithub.com/googleapis/python-bigquery/compare/v2.32.0...v2.33.0)

##### Features

- add `--no_query_cache` option to `%%bigquery` magics to disable query cache ([#1141](https://togithub.com/googleapis/python-bigquery/issues/1141)) ([7dd30af](https://togithub.com/googleapis/python-bigquery/commit/7dd30af41b8a595b96176c964ba14aa41645ef0d))

##### Bug Fixes

- return 403 when VPC-SC violation happens ([#1131](https://togithub.com/googleapis/python-bigquery/issues/1131)) ([f5daa9b](https://togithub.com/googleapis/python-bigquery/commit/f5daa9b41377a58cb3220bb2ab7c72adc6462196))

##### Documentation

- reference BigQuery REST API defaults in `LoadJobConfig` descrip… ([#1132](https://togithub.com/googleapis/python-bigquery/issues/1132)) ([18d9580](https://togithub.com/googleapis/python-bigquery/commit/18d958062721d6be81e7bd7a5bd66f277344a864))
- show common job properties in `get_job` and `cancel_job` samples ([#1137](https://togithub.com/googleapis/python-bigquery/issues/1137)) ([8edc10d](https://togithub.com/googleapis/python-bigquery/commit/8edc10d019bd96defebc4f92a47774901e9b956f))
googleapis/python-pubsub

### [`v2.10.0`](https://togithub.com/googleapis/python-pubsub/blob/HEAD/CHANGELOG.md#2100-httpsgithubcomgoogleapispython-pubsubcomparev290v2100-2022-03-04)

[Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.9.0...v2.10.0)

##### Features

- add api key support ([#571](https://togithub.com/googleapis/python-pubsub/issues/571)) ([cdda762](https://togithub.com/googleapis/python-pubsub/commit/cdda762f6d15d96f5e2d7fac975f3494dc49eaa9))
- add exactly once delivery flag ([#577](https://togithub.com/googleapis/python-pubsub/issues/577)) ([d6614e2](https://togithub.com/googleapis/python-pubsub/commit/d6614e274328c58449e67dfc788e2e7986c0c10b))
- add support for exactly once delivery ([#578](https://togithub.com/googleapis/python-pubsub/issues/578)) ([95a86fa](https://togithub.com/googleapis/python-pubsub/commit/95a86fa5f528701b760064f0cece0efa4e60cd44))
- exactly-once delivery support ([#550](https://togithub.com/googleapis/python-pubsub/issues/550)) ([2fb6e15](https://togithub.com/googleapis/python-pubsub/commit/2fb6e1533192ae81dceee5c71283169a0a85a015))

##### Bug Fixes

- **deps:** move libcst to extras ([#585](https://togithub.com/googleapis/python-pubsub/issues/585)) ([0846762](https://togithub.com/googleapis/python-pubsub/commit/084676243ca4afd54cda601e589b80883f9703a3))
- refactor client classes for safer type checking ([#552](https://togithub.com/googleapis/python-pubsub/issues/552)) ([7f705be](https://togithub.com/googleapis/python-pubsub/commit/7f705beb927383f14b9d56f0341ee0de101f7c05))
- resolve DuplicateCredentialArgs error when using credentials_file ([8ca8cf2](https://togithub.com/googleapis/python-pubsub/commit/8ca8cf27333baf823a1dffd081e63079f1a12625))

##### Samples

- samples: create subscription with filtering enabled [#580](https://togithub.com/googleapis/python-pubsub/pull/580)
- samples: handle empty response in sync pull samples [#586](https://togithub.com/googleapis/python-pubsub/pull/586)
- samples: sample for receiving messages with exactly-once delivery enabled [#588](https://togithub.com/googleapis/python-pubsub/pull/588)
- samples: create subscription with exactly once delivery [#592](https://togithub.com/googleapis/python-pubsub/pull/592) [https://github.com/googleapis/python-pubsub/pull/588](https://togithub.com/googleapis/python-pubsub/pull/588)/588

##### Documentation

- add autogenerated code snippets ([aa3754c](https://togithub.com/googleapis/python-pubsub/commit/aa3754cf432bd02be2734a23a32d5b36cd216aee))
- Docs have inconsistent default values for max_latency and max_bytes ([#572](https://togithub.com/googleapis/python-pubsub/issues/572)) ([d136dfd](https://togithub.com/googleapis/python-pubsub/commit/d136dfdb69ebeebd1411a1415f863b94d07078f0))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 7e49254f4..b44576c1b 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ google-cloud-logging==3.0.0 -google-cloud-bigquery==2.32.0 +google-cloud-bigquery==2.34.2 google-cloud-storage==2.1.0; python_version == '3.6' google-cloud-storage==2.1.0; python_version >= '3.7' -google-cloud-pubsub==2.9.0 +google-cloud-pubsub==2.10.0 From 6f0c61ce2f1a404c22cbc6543c45b8be1d02aafc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 10 Mar 2022 00:52:13 +0000 Subject: [PATCH 09/36] chore: Adding support for pytest-xdist and pytest-parallel (#496) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 --- .github/.OwlBot.lock.yaml | 3 +- samples/snippets/noxfile.py | 80 +++++++++++++++++++++---------------- 2 files changed, 47 insertions(+), 36 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index fa15cb546..44c78f7cc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 - + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 20cdfc620..4c808af73 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 891794e08fe73dd7e6aa34ddfc6fbe2419de9fc1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 10 Mar 2022 11:44:16 +0100 Subject: [PATCH 10/36] chore(deps): update dependency google-cloud-pubsub to v2.11.0 (#499) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.10.0` -> `==2.11.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/compatibility-slim/2.10.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.11.0/confidence-slim/2.10.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.11.0`](https://togithub.com/googleapis/python-pubsub/blob/HEAD/CHANGELOG.md#​2110-httpsgithubcomgoogleapispython-pubsubcomparev2100v2110-2022-03-09) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.10.0...v2.11.0) ##### Features - retry temporary GRPC statuses for ack/modack/nack when exactly-once delivery is enabled ([#​607](https://togithub.com/googleapis/python-pubsub/issues/607)) ([a91bed8](https://togithub.com/googleapis/python-pubsub/commit/a91bed829c9040fcc6c1e70b99b66188ac4ded40)) - return singleton success future for exactly-once methods in Message ([#​608](https://togithub.com/googleapis/python-pubsub/issues/608)) ([253ced2](https://togithub.com/googleapis/python-pubsub/commit/253ced28f308450c7a1a93cc38f6d101ecd7d4c0)) ##### Bug Fixes - **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#​600](https://togithub.com/googleapis/python-pubsub/issues/600)) ([1608b7f](https://togithub.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e)) - **deps:** require proto-plus>=1.15.0 ([1608b7f](https://togithub.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e))
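
For readers unfamiliar with the exactly-once features referenced above, a minimal sketch of how the new ack futures are typically consumed is shown below. It is not part of this PR; the project and subscription names are placeholders, and it assumes a subscription with exactly-once delivery enabled and google-cloud-pubsub >= 2.11.0.

```python
from concurrent.futures import TimeoutError

from google.cloud import pubsub_v1
from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions

subscriber = pubsub_v1.SubscriberClient()
# Placeholder project and subscription names.
subscription_path = subscriber.subscription_path("my-project", "my-subscription")


def callback(message: pubsub_v1.subscriber.message.Message) -> None:
    # With exactly-once delivery enabled, ack_with_response() returns a future
    # that resolves once the service has durably recorded the acknowledgement.
    ack_future = message.ack_with_response()
    try:
        ack_future.result(timeout=60)
        print(f"Ack for {message.message_id} succeeded")
    except sub_exceptions.AcknowledgeError as e:
        # Transient gRPC errors are retried internally (per the notes above);
        # permanent failures surface here with their status code.
        print(f"Ack for {message.message_id} failed: {e.error_code}")


streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
with subscriber:
    try:
        streaming_pull_future.result(timeout=30)
    except TimeoutError:
        streaming_pull_future.cancel()  # stop pulling after the demo window
        streaming_pull_future.result()  # block until shutdown completes
```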
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index b44576c1b..a0c73323d 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -2,4 +2,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==2.34.2 google-cloud-storage==2.1.0; python_version == '3.6' google-cloud-storage==2.1.0; python_version >= '3.7' -google-cloud-pubsub==2.10.0 +google-cloud-pubsub==2.11.0 From 8cef3b78aa039a1432df53245df26539fb22927a Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Mon, 14 Mar 2022 16:38:39 -0700 Subject: [PATCH 11/36] chore: Update environment tests submodule (#503) --- tests/environment | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/environment b/tests/environment index 41c32ce34..21f1ea63a 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 41c32ce3425529680e32701549d3f682f9c82b63 +Subproject commit 21f1ea63a567dfd1b601f7cb8ee6177c77f82cc5 From 4bc3d0dbeded5e2fecd9486983c84e20af91e5d8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 15 Mar 2022 16:54:35 +0100 Subject: [PATCH 12/36] chore(deps): update all dependencies (#502) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-storage](https://togithub.com/googleapis/python-storage) | `==2.1.0` -> `==2.2.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/compatibility-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-storage/2.2.0/confidence-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==7.0.1` -> `==7.1.0` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/compatibility-slim/7.0.1)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/pypi/pytest/7.1.0/confidence-slim/7.0.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-storage ### [`v2.2.0`](https://togithub.com/googleapis/python-storage/blob/HEAD/CHANGELOG.md#​220-httpsgithubcomgoogleapispython-storagecomparev210v220-2022-03-14) [Compare Source](https://togithub.com/googleapis/python-storage/compare/v2.1.0...v2.2.0) ##### Features - allow no project in client methods using storage emulator ([#​703](https://togithub.com/googleapis/python-storage/issues/703)) ([bcde0ec](https://togithub.com/googleapis/python-storage/commit/bcde0ec619d7d303892bcc0863b7f977c79f7649)) ##### Bug Fixes - add user agent in python-storage when calling resumable media ([c7bf615](https://togithub.com/googleapis/python-storage/commit/c7bf615909a04f3bab3efb1047a9f4ba659bba19)) - **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#​722](https://togithub.com/googleapis/python-storage/issues/722)) ([e9aab38](https://togithub.com/googleapis/python-storage/commit/e9aab389f868799d4425133954bad4f1cbb85786)) - Fix BlobReader handling of interleaved reads and seeks ([#​721](https://togithub.com/googleapis/python-storage/issues/721)) ([5d1cfd2](https://togithub.com/googleapis/python-storage/commit/5d1cfd2050321481a3bc4acbe80537ea666506fa)) - retry client side requests timeout ([#​727](https://togithub.com/googleapis/python-storage/issues/727)) ([e0b3b35](https://togithub.com/googleapis/python-storage/commit/e0b3b354d51e4be7c563d7f2f628a7139df842c0)) ##### Documentation - fixed download_blob_to_file example ([#​704](https://togithub.com/googleapis/python-storage/issues/704)) ([2c94d98](https://togithub.com/googleapis/python-storage/commit/2c94d98ed21cc768cfa54fac3d734254fc4d8480))
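
As a point of reference for the documentation fix noted above, a minimal sketch of `download_blob_to_file` usage is shown below. It is not part of this PR; the bucket and object names are placeholders.

```python
from google.cloud import storage

client = storage.Client()

# download_blob_to_file() accepts either a Blob object or a "gs://bucket/object"
# URI string, plus a writable binary file-like object.
with open("remote-copy.txt", "wb") as file_obj:
    client.download_blob_to_file("gs://my-bucket/remote-file.txt", file_obj)
```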
pytest-dev/pytest

### [`v7.1.0`](https://togithub.com/pytest-dev/pytest/releases/7.1.0)

[Compare Source](https://togithub.com/pytest-dev/pytest/compare/7.0.1...7.1.0)

# pytest 7.1.0 (2022-03-13)

## Breaking Changes

-   [#​8838](https://togithub.com/pytest-dev/pytest/issues/8838): As per our policy, the following features have been deprecated in the 6.X series and are now removed:

    -   `pytest._fillfuncargs` function.
    -   `pytest_warning_captured` hook - use `pytest_warning_recorded` instead.
    -   `-k -foobar` syntax - use `-k 'not foobar'` instead.
    -   `-k foobar:` syntax.
    -   `pytest.collect` module - import from `pytest` directly.

    For more information consult [Deprecations and Removals](https://docs.pytest.org/en/latest/deprecations.html) in the docs.

-   [#​9437](https://togithub.com/pytest-dev/pytest/issues/9437): Dropped support for Python 3.6, which reached [end-of-life](https://devguide.python.org/#status-of-python-branches) at 2021-12-23.

## Improvements

-   [#​5192](https://togithub.com/pytest-dev/pytest/issues/5192): Fixed test output for some data types where `-v` would show less information. Also, when showing diffs for sequences, `-q` would produce full diffs instead of the expected diff.
-   [#​9362](https://togithub.com/pytest-dev/pytest/issues/9362): pytest now avoids specialized assert formatting when it is detected that the default `__eq__` is overridden in `attrs` or `dataclasses`.
-   [#​9536](https://togithub.com/pytest-dev/pytest/issues/9536): When `-vv` is given on command line, show skipping and xfail reasons in full instead of truncating them to fit the terminal width.
-   [#​9644](https://togithub.com/pytest-dev/pytest/issues/9644): More information about the location of resources that led Python to raise `ResourceWarning` can now be obtained by enabling `tracemalloc`. See `resource-warnings` for more information.
-   [#​9678](https://togithub.com/pytest-dev/pytest/issues/9678): More types are now accepted in the `ids` argument to `@pytest.mark.parametrize`. Previously only `str`, `float`, `int` and `bool` were accepted; now `bytes`, `complex`, `re.Pattern`, `Enum` and anything with a `__name__` are also accepted.
-   [#​9692](https://togithub.com/pytest-dev/pytest/issues/9692): `pytest.approx` now raises a `TypeError` when given an unordered sequence (such as `set`). Note that this implies that custom classes which only implement `__iter__` and `__len__` are no longer supported as they don't guarantee order.

## Bug Fixes

-   [#​8242](https://togithub.com/pytest-dev/pytest/issues/8242): The deprecation of raising `unittest.SkipTest` to skip collection of tests during the pytest collection phase is reverted - this is now a supported feature again.
-   [#​9493](https://togithub.com/pytest-dev/pytest/issues/9493): Symbolic link components are no longer resolved in conftest paths. This means that if a conftest appears twice in collection tree, using symlinks, it will be executed twice. For example, given

    > tests/real/conftest.py
    > tests/real/test_it.py
    > tests/link -> tests/real

    running `pytest tests` now imports the conftest twice, once as `tests/real/conftest.py` and once as `tests/link/conftest.py`. This is a fix to match a similar change made to test collection itself in pytest 6.0 (see pull request #​6523 for details).

-   [#​9626](https://togithub.com/pytest-dev/pytest/issues/9626): Fixed count of selected tests on terminal collection summary when there were errors or skipped modules. If there were errors or skipped modules on collection, pytest would mistakenly subtract those from the selected count.
-   [#​9645](https://togithub.com/pytest-dev/pytest/issues/9645): Fixed regression where `--import-mode=importlib` used together with `PYTHONPATH` or `pythonpath` would cause import errors in test suites.
-   [#​9708](https://togithub.com/pytest-dev/pytest/issues/9708): `pytester` now requests a `monkeypatch` fixture instead of creating one internally. This solves some issues with tests that involve pytest environment variables.
-   [#​9730](https://togithub.com/pytest-dev/pytest/issues/9730): Malformed `pyproject.toml` files now produce a clearer error message.
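
Two of the 7.1.0 changes above can be illustrated with a small, self-contained test file. The sketch below is illustrative only (not part of this repository) and assumes pytest 7.1.0 is installed.

```python
import re

import pytest


@pytest.mark.parametrize(
    "pattern",
    [re.compile(r"foo"), re.compile(r"bar")],
    # re.Pattern objects are accepted as ids starting with pytest 7.1.
    ids=[re.compile(r"foo"), re.compile(r"bar")],
)
def test_pattern_matches(pattern):
    assert pattern.search("foobar")


def test_approx_rejects_unordered_sequences():
    # Sets are unordered, so pytest.approx raises TypeError for them in 7.1+.
    with pytest.raises(TypeError):
        assert {0.1 + 0.2} == pytest.approx({0.3})
```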
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-logging). --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index c531e813e..c265ab709 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==7.0.1 +pytest==7.1.0 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index a0c73323d..4cc5419a5 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,5 +1,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==2.34.2 -google-cloud-storage==2.1.0; python_version == '3.6' -google-cloud-storage==2.1.0; python_version >= '3.7' +google-cloud-storage==2.2.0 google-cloud-pubsub==2.11.0 From 8192f469d23bec8503a9888a9f58816e2d108d01 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Mar 2022 11:57:13 +0100 Subject: [PATCH 13/36] chore(deps): update dependency google-cloud-storage to v2.2.1 (#506) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 4cc5419a5..b234d190b 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==2.34.2 -google-cloud-storage==2.2.0 +google-cloud-storage==2.2.1 google-cloud-pubsub==2.11.0 From 7836ddc0e006f0e0ff1bc22019c9f96190f84735 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Thu, 17 Mar 2022 15:50:31 -0700 Subject: [PATCH 14/36] chore: Update blunderbuss (#508) --- .github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index 148ebf4e8..28438484f 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -1,4 +1,4 @@ assign_issues: - - Daniel-Sanche + - arbrown assign_prs: - - Daniel-Sanche + - arbrown From af8ee644b31c631048e098a82a251dea08411efb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 18 Mar 2022 05:09:41 +0100 Subject: [PATCH 15/36] chore(deps): update dependency pytest to v7.1.1 (#509) Co-authored-by: Drew Brown --- samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index c265ab709..678dbc8ef 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==1.11.1 -pytest==7.1.0 +pytest==7.1.1 From a1ca7d1e386e9ed8314fae53c24e4c28c2edf604 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 29 Mar 2022 00:00:18 +0000 
Subject: [PATCH 16/36] chore(python): use black==22.3.0 (#513) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe --- .github/.OwlBot.lock.yaml | 2 +- docs/conf.py | 5 +- google/cloud/logging_v2/_gapic.py | 31 +- google/cloud/logging_v2/_http.py | 22 +- google/cloud/logging_v2/client.py | 2 +- google/cloud/logging_v2/metric.py | 4 +- .../config_service_v2/async_client.py | 196 ++- .../services/config_service_v2/client.py | 293 +++- .../config_service_v2/transports/base.py | 82 +- .../config_service_v2/transports/grpc.py | 3 +- .../logging_service_v2/async_client.py | 59 +- .../services/logging_service_v2/client.py | 110 +- .../logging_service_v2/transports/base.py | 10 +- .../logging_service_v2/transports/grpc.py | 3 +- .../metrics_service_v2/async_client.py | 42 +- .../services/metrics_service_v2/client.py | 91 +- .../metrics_service_v2/transports/base.py | 14 +- .../metrics_service_v2/transports/grpc.py | 3 +- google/cloud/logging_v2/types/log_entry.py | 144 +- google/cloud/logging_v2/types/logging.py | 160 +- .../cloud/logging_v2/types/logging_config.py | 590 +++++-- .../cloud/logging_v2/types/logging_metrics.py | 113 +- noxfile.py | 9 +- samples/snippets/noxfile.py | 2 +- .../logging_v2/test_config_service_v2.py | 1424 +++++++++++++---- .../logging_v2/test_logging_service_v2.py | 475 ++++-- .../logging_v2/test_metrics_service_v2.py | 379 +++-- tests/unit/handlers/test_handlers.py | 62 +- tests/unit/handlers/test_structured_log.py | 70 +- .../transports/test_background_thread.py | 14 +- tests/unit/test__gapic.py | 4 +- tests/unit/test_logger.py | 18 +- 32 files changed, 3471 insertions(+), 965 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 44c78f7cc..87dd00611 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/docs/conf.py b/docs/conf.py index 743981d3a..04f5d0ef5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/logging_v2/_gapic.py b/google/cloud/logging_v2/_gapic.py index 3661d3d09..4b5429f2f 100644 --- a/google/cloud/logging_v2/_gapic.py +++ b/google/cloud/logging_v2/_gapic.py @@ -299,7 +299,12 @@ def sink_get(self, sink_name): ) def sink_update( - self, sink_name, filter_, destination, *, unique_writer_identity=False, + self, + sink_name, + filter_, + destination, + *, + unique_writer_identity=False, ): """Update a sink resource. @@ -326,7 +331,11 @@ def sink_update( protobuf to a dictionary). 
""" name = sink_name.split("/")[-1] # parse name out of full resoure name - sink_pb = LogSink(name=name, filter=filter_, destination=destination,) + sink_pb = LogSink( + name=name, + filter=filter_, + destination=destination, + ) request = UpdateSinkRequest( sink_name=sink_name, @@ -362,7 +371,7 @@ def sink_delete(self, sink_name): class _MetricsAPI(object): - """Helper mapping sink-related APIs. """ + """Helper mapping sink-related APIs.""" def __init__(self, gapic_api, client): self._gapic_api = gapic_api @@ -389,7 +398,9 @@ def list_metrics( """ path = f"projects/{project}" request = ListLogMetricsRequest( - parent=path, page_size=page_size, page_token=page_token, + parent=path, + page_size=page_size, + page_token=page_token, ) response = self._gapic_api.list_log_metrics(request=request) metric_iter = iter(response) @@ -449,7 +460,11 @@ def metric_get(self, project, metric_name): ) def metric_update( - self, project, metric_name, filter_, description, + self, + project, + metric_name, + filter_, + description, ): """Update a metric resource. @@ -465,7 +480,11 @@ def metric_update( protobuf to a dictionary). """ path = f"projects/{project}/metrics/{metric_name}" - metric_pb = LogMetric(name=path, filter=filter_, description=description,) + metric_pb = LogMetric( + name=path, + filter=filter_, + description=description, + ) metric_pb = self._gapic_api.update_log_metric( metric_name=path, metric=metric_pb ) diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py index 21fb38606..cb5fd61eb 100644 --- a/google/cloud/logging_v2/_http.py +++ b/google/cloud/logging_v2/_http.py @@ -454,18 +454,18 @@ def metric_get(self, project, metric_name): def metric_update(self, project, metric_name, filter_, description): """Update a metric resource. - See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - - Args: - project (str): ID of the project containing the metric. - metric_name (str): the name of the metric - filter_ (str): the advanced logs filter expression defining the - entries exported by the metric. - description (str): description of the metric. + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/update - Returns: - dict: The returned (updated) resource. + Args: + project (str): ID of the project containing the metric. + metric_name (str): the name of the metric + filter_ (str): the advanced logs filter expression defining the + entries exported by the metric. + description (str): description of the metric. + + Returns: + dict: The returned (updated) resource. """ target = f"/projects/{project}/metrics/{metric_name}" data = {"name": metric_name, "filter": filter_, "description": description} diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index 3d5ea24fc..049737861 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -118,7 +118,7 @@ def __init__( client_options (Optional[Union[dict, google.api_core.client_options.ClientOptions]]): Client options used to set user options on the client. API Endpoint should be set through client_options. - """ + """ super(Client, self).__init__( project=project, credentials=credentials, diff --git a/google/cloud/logging_v2/metric.py b/google/cloud/logging_v2/metric.py index 2959bacc2..167165c97 100644 --- a/google/cloud/logging_v2/metric.py +++ b/google/cloud/logging_v2/metric.py @@ -20,8 +20,8 @@ class Metric(object): """Metrics represent named filters for log entries. 
- See - https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics + See + https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics """ def __init__(self, name, *, filter_=None, client=None, description=""): diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index de3a6bbb7..916fbd18b 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -318,12 +318,20 @@ def sample_list_buckets(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListBucketsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -391,7 +399,12 @@ def sample_get_bucket(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -462,7 +475,12 @@ def sample_create_bucket(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -541,7 +559,12 @@ def sample_update_bucket(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -606,7 +629,10 @@ def sample_delete_bucket(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def undelete_bucket( @@ -666,7 +692,10 @@ def sample_undelete_bucket(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def list_views( @@ -759,12 +788,20 @@ def sample_list_views(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListViewsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -832,7 +869,12 @@ def sample_get_view(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -902,7 +944,12 @@ def sample_create_view(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -974,7 +1021,12 @@ def sample_update_view(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1037,7 +1089,10 @@ def sample_delete_view(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def list_sinks( @@ -1145,12 +1200,20 @@ def sample_list_sinks(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListSinksAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1270,7 +1333,12 @@ def sample_get_sink(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1396,7 +1464,12 @@ def sample_create_sink(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1562,7 +1635,12 @@ def sample_update_sink(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1669,7 +1747,10 @@ def sample_delete_sink(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def list_exclusions( @@ -1779,12 +1860,20 @@ def sample_list_exclusions(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListExclusionsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1900,7 +1989,12 @@ def sample_get_exclusion(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2025,7 +2119,12 @@ def sample_create_exclusion(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2164,7 +2263,12 @@ def sample_update_exclusion(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2267,7 +2371,10 @@ def sample_delete_exclusion(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def get_cmek_settings( @@ -2356,7 +2463,12 @@ def sample_get_cmek_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2452,7 +2564,12 @@ def sample_update_cmek_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2574,7 +2691,12 @@ def sample_get_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2706,7 +2828,12 @@ def sample_update_settings(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2778,7 +2905,12 @@ def sample_copy_log_entries(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -2800,7 +2932,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index 041b1c838..d14ea70da 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -59,7 +59,10 @@ class ConfigServiceV2ClientMeta(type): _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[ConfigServiceV2Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. 
Args: @@ -164,9 +167,13 @@ def transport(self) -> ConfigServiceV2Transport: return self._transport @staticmethod - def cmek_settings_path(project: str,) -> str: + def cmek_settings_path( + project: str, + ) -> str: """Returns a fully-qualified cmek_settings string.""" - return "projects/{project}/cmekSettings".format(project=project,) + return "projects/{project}/cmekSettings".format( + project=project, + ) @staticmethod def parse_cmek_settings_path(path: str) -> Dict[str, str]: @@ -175,10 +182,16 @@ def parse_cmek_settings_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_bucket_path(project: str, location: str, bucket: str,) -> str: + def log_bucket_path( + project: str, + location: str, + bucket: str, + ) -> str: """Returns a fully-qualified log_bucket string.""" return "projects/{project}/locations/{location}/buckets/{bucket}".format( - project=project, location=location, bucket=bucket, + project=project, + location=location, + bucket=bucket, ) @staticmethod @@ -191,10 +204,14 @@ def parse_log_bucket_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_exclusion_path(project: str, exclusion: str,) -> str: + def log_exclusion_path( + project: str, + exclusion: str, + ) -> str: """Returns a fully-qualified log_exclusion string.""" return "projects/{project}/exclusions/{exclusion}".format( - project=project, exclusion=exclusion, + project=project, + exclusion=exclusion, ) @staticmethod @@ -204,9 +221,15 @@ def parse_log_exclusion_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_sink_path(project: str, sink: str,) -> str: + def log_sink_path( + project: str, + sink: str, + ) -> str: """Returns a fully-qualified log_sink string.""" - return "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) + return "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) @staticmethod def parse_log_sink_path(path: str) -> Dict[str, str]: @@ -215,10 +238,18 @@ def parse_log_sink_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def log_view_path(project: str, location: str, bucket: str, view: str,) -> str: + def log_view_path( + project: str, + location: str, + bucket: str, + view: str, + ) -> str: """Returns a fully-qualified log_view string.""" return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( - project=project, location=location, bucket=bucket, view=view, + project=project, + location=location, + bucket=bucket, + view=view, ) @staticmethod @@ -231,9 +262,13 @@ def parse_log_view_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def settings_path(project: str,) -> str: + def settings_path( + project: str, + ) -> str: """Returns a fully-qualified settings string.""" - return "projects/{project}/settings".format(project=project,) + return "projects/{project}/settings".format( + project=project, + ) @staticmethod def parse_settings_path(path: str) -> Dict[str, str]: @@ -242,7 +277,9 @@ def parse_settings_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -255,9 +292,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, 
str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -266,9 +307,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -277,9 +322,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -288,10 +337,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -563,12 +616,20 @@ def sample_list_buckets(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListBucketsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -637,7 +698,12 @@ def sample_get_bucket(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -709,7 +775,12 @@ def sample_create_bucket(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -789,7 +860,12 @@ def sample_update_bucket(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -855,7 +931,10 @@ def sample_delete_bucket(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def undelete_bucket( @@ -916,7 +995,10 @@ def sample_undelete_bucket(): # Send the request. 
rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def list_views( @@ -1009,12 +1091,20 @@ def sample_list_views(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListViewsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1083,7 +1173,12 @@ def sample_get_view(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1154,7 +1249,12 @@ def sample_create_view(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1227,7 +1327,12 @@ def sample_update_view(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1291,7 +1396,10 @@ def sample_delete_view(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def list_sinks( @@ -1388,12 +1496,20 @@ def sample_list_sinks(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListSinksPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1502,7 +1618,12 @@ def sample_get_sink(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1628,7 +1749,12 @@ def sample_create_sink(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1783,7 +1909,12 @@ def sample_update_sink(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1879,7 +2010,10 @@ def sample_delete_sink(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def list_exclusions( @@ -1978,12 +2112,20 @@ def sample_list_exclusions(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListExclusionsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -2088,7 +2230,12 @@ def sample_get_exclusion(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2213,7 +2360,12 @@ def sample_create_exclusion(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2352,7 +2504,12 @@ def sample_update_exclusion(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2444,7 +2601,10 @@ def sample_delete_exclusion(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def get_cmek_settings( @@ -2534,7 +2694,12 @@ def sample_get_cmek_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2631,7 +2796,12 @@ def sample_update_cmek_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2753,7 +2923,12 @@ def sample_get_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2885,7 +3060,12 @@ def sample_update_settings(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2958,7 +3138,12 @@ def sample_copy_log_entries(): rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( @@ -2987,7 +3172,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 6dfc1fd2f..95de06d1a 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -32,7 +32,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -127,37 +129,59 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.list_buckets: gapic_v1.method.wrap_method( - self.list_buckets, default_timeout=None, client_info=client_info, + self.list_buckets, + default_timeout=None, + client_info=client_info, ), self.get_bucket: gapic_v1.method.wrap_method( - self.get_bucket, default_timeout=None, client_info=client_info, + self.get_bucket, + default_timeout=None, + client_info=client_info, ), self.create_bucket: gapic_v1.method.wrap_method( - self.create_bucket, default_timeout=None, client_info=client_info, + self.create_bucket, + default_timeout=None, + client_info=client_info, ), self.update_bucket: gapic_v1.method.wrap_method( - self.update_bucket, default_timeout=None, client_info=client_info, + self.update_bucket, + default_timeout=None, + client_info=client_info, ), self.delete_bucket: gapic_v1.method.wrap_method( - self.delete_bucket, default_timeout=None, client_info=client_info, + self.delete_bucket, + default_timeout=None, + client_info=client_info, ), self.undelete_bucket: gapic_v1.method.wrap_method( - self.undelete_bucket, default_timeout=None, client_info=client_info, + self.undelete_bucket, + default_timeout=None, + client_info=client_info, ), self.list_views: gapic_v1.method.wrap_method( - self.list_views, default_timeout=None, client_info=client_info, + self.list_views, + default_timeout=None, + client_info=client_info, ), self.get_view: gapic_v1.method.wrap_method( - self.get_view, default_timeout=None, client_info=client_info, + self.get_view, + default_timeout=None, + client_info=client_info, ), self.create_view: gapic_v1.method.wrap_method( - self.create_view, default_timeout=None, client_info=client_info, + self.create_view, + default_timeout=None, + client_info=client_info, ), self.update_view: gapic_v1.method.wrap_method( - self.update_view, default_timeout=None, client_info=client_info, + self.update_view, + default_timeout=None, + client_info=client_info, ), self.delete_view: gapic_v1.method.wrap_method( - self.delete_view, default_timeout=None, client_info=client_info, + self.delete_view, + default_timeout=None, + client_info=client_info, ), self.list_sinks: gapic_v1.method.wrap_method( self.list_sinks, @@ -192,7 +216,9 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.create_sink: gapic_v1.method.wrap_method( - 
self.create_sink, default_timeout=120.0, client_info=client_info, + self.create_sink, + default_timeout=120.0, + client_info=client_info, ), self.update_sink: gapic_v1.method.wrap_method( self.update_sink, @@ -259,10 +285,14 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.create_exclusion: gapic_v1.method.wrap_method( - self.create_exclusion, default_timeout=120.0, client_info=client_info, + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, ), self.update_exclusion: gapic_v1.method.wrap_method( - self.update_exclusion, default_timeout=120.0, client_info=client_info, + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, ), self.delete_exclusion: gapic_v1.method.wrap_method( self.delete_exclusion, @@ -281,7 +311,9 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_cmek_settings: gapic_v1.method.wrap_method( - self.get_cmek_settings, default_timeout=None, client_info=client_info, + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, ), self.update_cmek_settings: gapic_v1.method.wrap_method( self.update_cmek_settings, @@ -289,22 +321,28 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.get_settings: gapic_v1.method.wrap_method( - self.get_settings, default_timeout=None, client_info=client_info, + self.get_settings, + default_timeout=None, + client_info=client_info, ), self.update_settings: gapic_v1.method.wrap_method( - self.update_settings, default_timeout=None, client_info=client_info, + self.update_settings, + default_timeout=None, + client_info=client_info, ), self.copy_log_entries: gapic_v1.method.wrap_method( - self.copy_log_entries, default_timeout=None, client_info=client_info, + self.copy_log_entries, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 301334f80..228f1c9a3 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -228,8 +228,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index c89da25a5..7973d4395 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -317,7 +317,10 @@ def sample_delete_log(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def write_log_entries( @@ -506,7 +509,12 @@ def sample_write_log_entries(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -655,12 +663,20 @@ def sample_list_log_entries(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListLogEntriesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -739,12 +755,20 @@ def sample_list_monitored_resource_descriptors(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListMonitoredResourceDescriptorsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -855,12 +879,20 @@ def sample_list_logs(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListLogsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -943,7 +975,12 @@ def request_generator(): ) # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -957,7 +994,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 3eae59704..8638cfb10 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -57,7 +57,10 @@ class LoggingServiceV2ClientMeta(type): _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[LoggingServiceV2Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. 
Args: @@ -162,9 +165,15 @@ def transport(self) -> LoggingServiceV2Transport: return self._transport @staticmethod - def log_path(project: str, log: str,) -> str: + def log_path( + project: str, + log: str, + ) -> str: """Returns a fully-qualified log string.""" - return "projects/{project}/logs/{log}".format(project=project, log=log,) + return "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) @staticmethod def parse_log_path(path: str) -> Dict[str, str]: @@ -173,7 +182,9 @@ def parse_log_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -186,9 +197,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -197,9 +212,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -208,9 +227,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -219,10 +242,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -488,7 +515,10 @@ def sample_delete_log(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def write_log_entries( @@ -665,7 +695,12 @@ def sample_write_log_entries(): rpc = self._transport._wrapped_methods[self._transport.write_log_entries] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -803,12 +838,20 @@ def sample_list_log_entries(): rpc = self._transport._wrapped_methods[self._transport.list_log_entries] # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListLogEntriesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -879,12 +922,20 @@ def sample_list_monitored_resource_descriptors(): ] # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListMonitoredResourceDescriptorsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -984,12 +1035,20 @@ def sample_list_logs(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListLogsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1057,7 +1116,12 @@ def request_generator(): rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1078,7 +1142,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 5f474f006..716a2fbbc 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -30,7 +30,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -226,9 +228,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 76b562d7e..176d4475f 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index e3bf4c51a..af6265e82 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -307,12 +307,20 @@ def sample_list_log_metrics(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListLogMetricsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -427,7 +435,12 @@ def sample_get_log_metric(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -547,7 +560,12 @@ def sample_create_log_metric(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -679,7 +697,12 @@ def sample_update_log_metric(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -776,7 +799,10 @@ def sample_delete_log_metric(): # Send the request. 
await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def __aenter__(self): @@ -788,7 +814,9 @@ async def __aexit__(self, exc_type, exc, tb): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index 5ab25db20..bb2221b85 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -58,7 +58,10 @@ class MetricsServiceV2ClientMeta(type): _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[MetricsServiceV2Transport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. Args: @@ -163,10 +166,14 @@ def transport(self) -> MetricsServiceV2Transport: return self._transport @staticmethod - def log_metric_path(project: str, metric: str,) -> str: + def log_metric_path( + project: str, + metric: str, + ) -> str: """Returns a fully-qualified log_metric string.""" return "projects/{project}/metrics/{metric}".format( - project=project, metric=metric, + project=project, + metric=metric, ) @staticmethod @@ -176,7 +183,9 @@ def parse_log_metric_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -189,9 +198,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -200,9 +213,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -211,9 +228,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def 
parse_common_project_path(path: str) -> Dict[str, str]: @@ -222,10 +243,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -490,12 +515,20 @@ def sample_list_log_metrics(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListLogMetricsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -599,7 +632,12 @@ def sample_get_log_metric(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -719,7 +757,12 @@ def sample_create_log_metric(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -840,7 +883,12 @@ def sample_update_log_metric(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -926,7 +974,10 @@ def sample_delete_log_metric(): # Send the request. 
rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def __enter__(self): @@ -945,7 +996,9 @@ def __exit__(self, type, value, traceback): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index b3d9bab57..cc483aeff 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -30,7 +30,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-cloud-logging",).version, + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() @@ -158,7 +160,9 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.create_log_metric: gapic_v1.method.wrap_method( - self.create_log_metric, default_timeout=60.0, client_info=client_info, + self.create_log_metric, + default_timeout=60.0, + client_info=client_info, ), self.update_log_metric: gapic_v1.method.wrap_method( self.update_log_metric, @@ -197,9 +201,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index d0241fdd2..6c1fd9b73 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -225,8 +225,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index 1bc7a3ea4..2bdea1b73 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -25,7 +25,12 @@ __protobuf__ = proto.module( package="google.logging.v2", - manifest={"LogEntry", "LogEntryOperation", "LogEntrySourceLocation", "LogSplit",}, + manifest={ + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + }, ) @@ -191,35 +196,88 @@ class LogEntry(proto.Message): entries split from a single LogEntry. 
""" - log_name = proto.Field(proto.STRING, number=12,) + log_name = proto.Field( + proto.STRING, + number=12, + ) resource = proto.Field( - proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, + proto.MESSAGE, + number=8, + message=monitored_resource_pb2.MonitoredResource, ) proto_payload = proto.Field( - proto.MESSAGE, number=2, oneof="payload", message=any_pb2.Any, + proto.MESSAGE, + number=2, + oneof="payload", + message=any_pb2.Any, + ) + text_payload = proto.Field( + proto.STRING, + number=3, + oneof="payload", ) - text_payload = proto.Field(proto.STRING, number=3, oneof="payload",) json_payload = proto.Field( - proto.MESSAGE, number=6, oneof="payload", message=struct_pb2.Struct, + proto.MESSAGE, + number=6, + oneof="payload", + message=struct_pb2.Struct, + ) + timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, ) - timestamp = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) receive_timestamp = proto.Field( - proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + severity = proto.Field( + proto.ENUM, + number=10, + enum=log_severity_pb2.LogSeverity, + ) + insert_id = proto.Field( + proto.STRING, + number=4, ) - severity = proto.Field(proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity,) - insert_id = proto.Field(proto.STRING, number=4,) http_request = proto.Field( - proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, + proto.MESSAGE, + number=7, + message=http_request_pb2.HttpRequest, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + operation = proto.Field( + proto.MESSAGE, + number=15, + message="LogEntryOperation", + ) + trace = proto.Field( + proto.STRING, + number=22, + ) + span_id = proto.Field( + proto.STRING, + number=27, + ) + trace_sampled = proto.Field( + proto.BOOL, + number=30, ) - labels = proto.MapField(proto.STRING, proto.STRING, number=11,) - operation = proto.Field(proto.MESSAGE, number=15, message="LogEntryOperation",) - trace = proto.Field(proto.STRING, number=22,) - span_id = proto.Field(proto.STRING, number=27,) - trace_sampled = proto.Field(proto.BOOL, number=30,) source_location = proto.Field( - proto.MESSAGE, number=23, message="LogEntrySourceLocation", + proto.MESSAGE, + number=23, + message="LogEntrySourceLocation", + ) + split = proto.Field( + proto.MESSAGE, + number=35, + message="LogSplit", ) - split = proto.Field(proto.MESSAGE, number=35, message="LogSplit",) class LogEntryOperation(proto.Message): @@ -244,10 +302,22 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field(proto.STRING, number=1,) - producer = proto.Field(proto.STRING, number=2,) - first = proto.Field(proto.BOOL, number=3,) - last = proto.Field(proto.BOOL, number=4,) + id = proto.Field( + proto.STRING, + number=1, + ) + producer = proto.Field( + proto.STRING, + number=2, + ) + first = proto.Field( + proto.BOOL, + number=3, + ) + last = proto.Field( + proto.BOOL, + number=4, + ) class LogEntrySourceLocation(proto.Message): @@ -272,9 +342,18 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). 
""" - file = proto.Field(proto.STRING, number=1,) - line = proto.Field(proto.INT64, number=2,) - function = proto.Field(proto.STRING, number=3,) + file = proto.Field( + proto.STRING, + number=1, + ) + line = proto.Field( + proto.INT64, + number=2, + ) + function = proto.Field( + proto.STRING, + number=3, + ) class LogSplit(proto.Message): @@ -298,9 +377,18 @@ class LogSplit(proto.Message): original LogEntry was split into. """ - uid = proto.Field(proto.STRING, number=1,) - index = proto.Field(proto.INT32, number=2,) - total_splits = proto.Field(proto.INT32, number=3,) + uid = proto.Field( + proto.STRING, + number=1, + ) + index = proto.Field( + proto.INT32, + number=2, + ) + total_splits = proto.Field( + proto.INT32, + number=3, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index 76d86e34f..383a4ef77 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -60,7 +60,10 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field(proto.STRING, number=1,) + log_name = proto.Field( + proto.STRING, + number=1, + ) class WriteLogEntriesRequest(proto.Message): @@ -152,19 +155,37 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. """ - log_name = proto.Field(proto.STRING, number=1,) + log_name = proto.Field( + proto.STRING, + number=1, + ) resource = proto.Field( - proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, + proto.MESSAGE, + number=2, + message=monitored_resource_pb2.MonitoredResource, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + entries = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=log_entry.LogEntry, + ) + partial_success = proto.Field( + proto.BOOL, + number=5, + ) + dry_run = proto.Field( + proto.BOOL, + number=6, ) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) - entries = proto.RepeatedField(proto.MESSAGE, number=4, message=log_entry.LogEntry,) - partial_success = proto.Field(proto.BOOL, number=5,) - dry_run = proto.Field(proto.BOOL, number=6,) class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. - """ + r"""Result returned from WriteLogEntries.""" class WriteLogEntriesPartialErrors(proto.Message): @@ -182,7 +203,10 @@ class WriteLogEntriesPartialErrors(proto.Message): """ log_entry_errors = proto.MapField( - proto.INT32, proto.MESSAGE, number=1, message=status_pb2.Status, + proto.INT32, + proto.MESSAGE, + number=1, + message=status_pb2.Status, ) @@ -241,11 +265,26 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. 
""" - resource_names = proto.RepeatedField(proto.STRING, number=8,) - filter = proto.Field(proto.STRING, number=2,) - order_by = proto.Field(proto.STRING, number=3,) - page_size = proto.Field(proto.INT32, number=4,) - page_token = proto.Field(proto.STRING, number=5,) + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + order_by = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) class ListLogEntriesResponse(proto.Message): @@ -277,8 +316,15 @@ class ListLogEntriesResponse(proto.Message): def raw_page(self): return self - entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - next_page_token = proto.Field(proto.STRING, number=2,) + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class ListMonitoredResourceDescriptorsRequest(proto.Message): @@ -298,8 +344,14 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. """ - page_size = proto.Field(proto.INT32, number=1,) - page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field( + proto.INT32, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) class ListMonitoredResourceDescriptorsResponse(proto.Message): @@ -324,7 +376,10 @@ def raw_page(self): number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - next_page_token = proto.Field(proto.STRING, number=2,) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class ListLogsRequest(proto.Message): @@ -365,10 +420,22 @@ class ListLogsRequest(proto.Message): - ``folders/[FOLDER_ID]`` """ - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - resource_names = proto.RepeatedField(proto.STRING, number=8,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) class ListLogsResponse(proto.Message): @@ -390,8 +457,14 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField(proto.STRING, number=3,) - next_page_token = proto.Field(proto.STRING, number=2,) + log_names = proto.RepeatedField( + proto.STRING, + number=3, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class TailLogEntriesRequest(proto.Message): @@ -432,9 +505,19 @@ class TailLogEntriesRequest(proto.Message): milliseconds. 
""" - resource_names = proto.RepeatedField(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=2,) - buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) + resource_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + buffer_window = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) class TailLogEntriesResponse(proto.Message): @@ -476,13 +559,24 @@ class Reason(proto.Enum): NOT_CONSUMED = 2 reason = proto.Field( - proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", + proto.ENUM, + number=1, + enum="TailLogEntriesResponse.SuppressionInfo.Reason", + ) + suppressed_count = proto.Field( + proto.INT32, + number=2, ) - suppressed_count = proto.Field(proto.INT32, number=2,) - entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) suppression_info = proto.RepeatedField( - proto.MESSAGE, number=2, message=SuppressionInfo, + proto.MESSAGE, + number=2, + message=SuppressionInfo, ) diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 3dab7a143..c2fcf30bd 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -153,15 +153,46 @@ class LogBucket(proto.Message): KMS key is allowed. """ - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=3,) - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - retention_days = proto.Field(proto.INT32, number=11,) - locked = proto.Field(proto.BOOL, number=9,) - lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) - restricted_fields = proto.RepeatedField(proto.STRING, number=15,) - cmek_settings = proto.Field(proto.MESSAGE, number=19, message="CmekSettings",) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + retention_days = proto.Field( + proto.INT32, + number=11, + ) + locked = proto.Field( + proto.BOOL, + number=9, + ) + lifecycle_state = proto.Field( + proto.ENUM, + number=12, + enum="LifecycleState", + ) + restricted_fields = proto.RepeatedField( + proto.STRING, + number=15, + ) + cmek_settings = proto.Field( + proto.MESSAGE, + number=19, + message="CmekSettings", + ) class LogView(proto.Message): @@ -199,11 +230,28 @@ class LogView(proto.Message): "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=3,) - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - filter = proto.Field(proto.STRING, number=7,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + filter 
= proto.Field( + proto.STRING, + number=7, + ) class LogSink(proto.Message): @@ -326,23 +374,59 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field(proto.STRING, number=1,) - destination = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=5,) - description = proto.Field(proto.STRING, number=18,) - disabled = proto.Field(proto.BOOL, number=19,) - exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",) - output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) - writer_identity = proto.Field(proto.STRING, number=8,) - include_children = proto.Field(proto.BOOL, number=9,) + name = proto.Field( + proto.STRING, + number=1, + ) + destination = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + description = proto.Field( + proto.STRING, + number=18, + ) + disabled = proto.Field( + proto.BOOL, + number=19, + ) + exclusions = proto.RepeatedField( + proto.MESSAGE, + number=16, + message="LogExclusion", + ) + output_version_format = proto.Field( + proto.ENUM, + number=6, + enum=VersionFormat, + ) + writer_identity = proto.Field( + proto.STRING, + number=8, + ) + include_children = proto.Field( + proto.BOOL, + number=9, + ) bigquery_options = proto.Field( - proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", + proto.MESSAGE, + number=12, + oneof="options", + message="BigQueryOptions", ) create_time = proto.Field( - proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, ) update_time = proto.Field( - proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, ) @@ -373,8 +457,14 @@ class BigQueryOptions(proto.Message): will have this field set to false. """ - use_partitioned_tables = proto.Field(proto.BOOL, number=1,) - uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3,) + use_partitioned_tables = proto.Field( + proto.BOOL, + number=1, + ) + uses_timestamp_column_partitioning = proto.Field( + proto.BOOL, + number=3, + ) class ListBucketsRequest(proto.Message): @@ -408,9 +498,18 @@ class ListBucketsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListBucketsResponse(proto.Message): @@ -430,8 +529,15 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets = proto.RepeatedField(proto.MESSAGE, number=1, message="LogBucket",) - next_page_token = proto.Field(proto.STRING, number=2,) + buckets = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogBucket", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class CreateBucketRequest(proto.Message): @@ -460,9 +566,19 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. 
""" - parent = proto.Field(proto.STRING, number=1,) - bucket_id = proto.Field(proto.STRING, number=2,) - bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) + parent = proto.Field( + proto.STRING, + number=1, + ) + bucket_id = proto.Field( + proto.STRING, + number=2, + ) + bucket = proto.Field( + proto.MESSAGE, + number=3, + message="LogBucket", + ) class UpdateBucketRequest(proto.Message): @@ -496,10 +612,19 @@ class UpdateBucketRequest(proto.Message): For example: ``updateMask=retention_days`` """ - name = proto.Field(proto.STRING, number=1,) - bucket = proto.Field(proto.MESSAGE, number=2, message="LogBucket",) + name = proto.Field( + proto.STRING, + number=1, + ) + bucket = proto.Field( + proto.MESSAGE, + number=2, + message="LogBucket", + ) update_mask = proto.Field( - proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, ) @@ -522,7 +647,10 @@ class GetBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class DeleteBucketRequest(proto.Message): @@ -544,7 +672,10 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UndeleteBucketRequest(proto.Message): @@ -566,7 +697,10 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ListViewsRequest(proto.Message): @@ -594,9 +728,18 @@ class ListViewsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListViewsResponse(proto.Message): @@ -616,8 +759,15 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",) - next_page_token = proto.Field(proto.STRING, number=2,) + views = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogView", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class CreateViewRequest(proto.Message): @@ -640,9 +790,19 @@ class CreateViewRequest(proto.Message): Required. The new view. 
""" - parent = proto.Field(proto.STRING, number=1,) - view_id = proto.Field(proto.STRING, number=2,) - view = proto.Field(proto.MESSAGE, number=3, message="LogView",) + parent = proto.Field( + proto.STRING, + number=1, + ) + view_id = proto.Field( + proto.STRING, + number=2, + ) + view = proto.Field( + proto.MESSAGE, + number=3, + message="LogView", + ) class UpdateViewRequest(proto.Message): @@ -673,10 +833,19 @@ class UpdateViewRequest(proto.Message): For example: ``updateMask=filter`` """ - name = proto.Field(proto.STRING, number=1,) - view = proto.Field(proto.MESSAGE, number=2, message="LogView",) + name = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.MESSAGE, + number=2, + message="LogView", + ) update_mask = proto.Field( - proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, ) @@ -696,7 +865,10 @@ class GetViewRequest(proto.Message): ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class DeleteViewRequest(proto.Message): @@ -717,7 +889,10 @@ class DeleteViewRequest(proto.Message): `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ListSinksRequest(proto.Message): @@ -746,9 +921,18 @@ class ListSinksRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListSinksResponse(proto.Message): @@ -768,8 +952,15 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks = proto.RepeatedField(proto.MESSAGE, number=1, message="LogSink",) - next_page_token = proto.Field(proto.STRING, number=2,) + sinks = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogSink", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetSinkRequest(proto.Message): @@ -791,7 +982,10 @@ class GetSinkRequest(proto.Message): ``"projects/my-project/sinks/my-sink"`` """ - sink_name = proto.Field(proto.STRING, number=1,) + sink_name = proto.Field( + proto.STRING, + number=1, + ) class CreateSinkRequest(proto.Message): @@ -832,9 +1026,19 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. 
""" - parent = proto.Field(proto.STRING, number=1,) - sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - unique_writer_identity = proto.Field(proto.BOOL, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message="LogSink", + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) class UpdateSinkRequest(proto.Message): @@ -894,11 +1098,23 @@ class UpdateSinkRequest(proto.Message): For example: ``updateMask=filter`` """ - sink_name = proto.Field(proto.STRING, number=1,) - sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - unique_writer_identity = proto.Field(proto.BOOL, number=3,) + sink_name = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message="LogSink", + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) update_mask = proto.Field( - proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, ) @@ -922,7 +1138,10 @@ class DeleteSinkRequest(proto.Message): ``"projects/my-project/sinks/my-sink"`` """ - sink_name = proto.Field(proto.STRING, number=1,) + sink_name = proto.Field( + proto.STRING, + number=1, + ) class LogExclusion(proto.Message): @@ -971,12 +1190,32 @@ class LogExclusion(proto.Message): exclusions. """ - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) - filter = proto.Field(proto.STRING, number=3,) - disabled = proto.Field(proto.BOOL, number=4,) - create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + disabled = proto.Field( + proto.BOOL, + number=4, + ) + create_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) class ListExclusionsRequest(proto.Message): @@ -1006,9 +1245,18 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListExclusionsResponse(proto.Message): @@ -1028,8 +1276,15 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions = proto.RepeatedField(proto.MESSAGE, number=1, message="LogExclusion",) - next_page_token = proto.Field(proto.STRING, number=2,) + exclusions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogExclusion", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetExclusionRequest(proto.Message): @@ -1051,7 +1306,10 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project/exclusions/my-exclusion"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class CreateExclusionRequest(proto.Message): @@ -1079,8 +1337,15 @@ class CreateExclusionRequest(proto.Message): resource. 
""" - parent = proto.Field(proto.STRING, number=1,) - exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) + parent = proto.Field( + proto.STRING, + number=1, + ) + exclusion = proto.Field( + proto.MESSAGE, + number=2, + message="LogExclusion", + ) class UpdateExclusionRequest(proto.Message): @@ -1116,10 +1381,19 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. """ - name = proto.Field(proto.STRING, number=1,) - exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) + name = proto.Field( + proto.STRING, + number=1, + ) + exclusion = proto.Field( + proto.MESSAGE, + number=2, + message="LogExclusion", + ) update_mask = proto.Field( - proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, ) @@ -1143,7 +1417,10 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project/exclusions/my-exclusion"`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class GetCmekSettingsRequest(proto.Message): @@ -1175,7 +1452,10 @@ class GetCmekSettingsRequest(proto.Message): projects and folders in the Google Cloud organization. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UpdateCmekSettingsRequest(proto.Message): @@ -1223,10 +1503,19 @@ class UpdateCmekSettingsRequest(proto.Message): For example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1,) - cmek_settings = proto.Field(proto.MESSAGE, number=2, message="CmekSettings",) + name = proto.Field( + proto.STRING, + number=1, + ) + cmek_settings = proto.Field( + proto.MESSAGE, + number=2, + message="CmekSettings", + ) update_mask = proto.Field( - proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, ) @@ -1296,9 +1585,18 @@ class CmekSettings(proto.Message): for more information. """ - name = proto.Field(proto.STRING, number=1,) - kms_key_name = proto.Field(proto.STRING, number=2,) - service_account_id = proto.Field(proto.STRING, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + service_account_id = proto.Field( + proto.STRING, + number=3, + ) class GetSettingsRequest(proto.Message): @@ -1331,7 +1629,10 @@ class GetSettingsRequest(proto.Message): and folders in the Google Cloud organization. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UpdateSettingsRequest(proto.Message): @@ -1376,10 +1677,19 @@ class UpdateSettingsRequest(proto.Message): For example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1,) - settings = proto.Field(proto.MESSAGE, number=2, message="Settings",) + name = proto.Field( + proto.STRING, + number=1, + ) + settings = proto.Field( + proto.MESSAGE, + number=2, + message="Settings", + ) update_mask = proto.Field( - proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, ) @@ -1453,11 +1763,26 @@ class Settings(proto.Message): manually if needed. 
""" - name = proto.Field(proto.STRING, number=1,) - kms_key_name = proto.Field(proto.STRING, number=2,) - kms_service_account_id = proto.Field(proto.STRING, number=3,) - storage_location = proto.Field(proto.STRING, number=4,) - disable_default_sink = proto.Field(proto.BOOL, number=5,) + name = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + kms_service_account_id = proto.Field( + proto.STRING, + number=3, + ) + storage_location = proto.Field( + proto.STRING, + number=4, + ) + disable_default_sink = proto.Field( + proto.BOOL, + number=5, + ) class CopyLogEntriesRequest(proto.Message): @@ -1480,9 +1805,18 @@ class CopyLogEntriesRequest(proto.Message): entries. """ - name = proto.Field(proto.STRING, number=1,) - filter = proto.Field(proto.STRING, number=3,) - destination = proto.Field(proto.STRING, number=4,) + name = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + destination = proto.Field( + proto.STRING, + number=4, + ) class CopyLogEntriesMetadata(proto.Message): @@ -1513,13 +1847,38 @@ class CopyLogEntriesMetadata(proto.Message): For example: ``"serviceAccount:foo@bar.com"`` """ - start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - state = proto.Field(proto.ENUM, number=3, enum="OperationState",) - cancellation_requested = proto.Field(proto.BOOL, number=4,) - request = proto.Field(proto.MESSAGE, number=5, message="CopyLogEntriesRequest",) - progress = proto.Field(proto.INT32, number=6,) - writer_identity = proto.Field(proto.STRING, number=7,) + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=3, + enum="OperationState", + ) + cancellation_requested = proto.Field( + proto.BOOL, + number=4, + ) + request = proto.Field( + proto.MESSAGE, + number=5, + message="CopyLogEntriesRequest", + ) + progress = proto.Field( + proto.INT32, + number=6, + ) + writer_identity = proto.Field( + proto.STRING, + number=7, + ) class CopyLogEntriesResponse(proto.Message): @@ -1530,7 +1889,10 @@ class CopyLogEntriesResponse(proto.Message): Number of log entries copied. 
""" - log_entries_copied_count = proto.Field(proto.INT64, number=1,) + log_entries_copied_count = proto.Field( + proto.INT64, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index af1f2f548..323599423 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -170,23 +170,56 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) - filter = proto.Field(proto.STRING, number=3,) - disabled = proto.Field(proto.BOOL, number=12,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + disabled = proto.Field( + proto.BOOL, + number=12, + ) metric_descriptor = proto.Field( - proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, + proto.MESSAGE, + number=5, + message=metric_pb2.MetricDescriptor, + ) + value_extractor = proto.Field( + proto.STRING, + number=6, + ) + label_extractors = proto.MapField( + proto.STRING, + proto.STRING, + number=7, ) - value_extractor = proto.Field(proto.STRING, number=6,) - label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7,) bucket_options = proto.Field( - proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, + proto.MESSAGE, + number=8, + message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, ) - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) update_time = proto.Field( - proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + version = proto.Field( + proto.ENUM, + number=4, + enum=ApiVersion, ) - version = proto.Field(proto.ENUM, number=4, enum=ApiVersion,) class ListLogMetricsRequest(proto.Message): @@ -212,9 +245,18 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1,) - page_token = proto.Field(proto.STRING, number=2,) - page_size = proto.Field(proto.INT32, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) class ListLogMetricsResponse(proto.Message): @@ -234,8 +276,15 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics = proto.RepeatedField(proto.MESSAGE, number=1, message="LogMetric",) - next_page_token = proto.Field(proto.STRING, number=2,) + metrics = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="LogMetric", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class GetLogMetricRequest(proto.Message): @@ -250,7 +299,10 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) class CreateLogMetricRequest(proto.Message): @@ -271,8 +323,15 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. 
""" - parent = proto.Field(proto.STRING, number=1,) - metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) + parent = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message="LogMetric", + ) class UpdateLogMetricRequest(proto.Message): @@ -294,8 +353,15 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. """ - metric_name = proto.Field(proto.STRING, number=1,) - metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) + metric_name = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message="LogMetric", + ) class DeleteLogMetricRequest(proto.Message): @@ -310,7 +376,10 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1,) + metric_name = proto.Field( + proto.STRING, + number=1, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 96ef7ee7e..d2f8f0e56 100644 --- a/noxfile.py +++ b/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -57,7 +57,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +69,8 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 4c808af73..949e0fde9 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 401394deb..75227b5b3 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -94,7 +94,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] + "client_class", + [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, + ], ) def test_config_service_v2_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -136,7 +140,11 @@ def test_config_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] + "client_class", + [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, + ], ) def test_config_service_v2_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -510,7 +518,9 @@ def test_config_service_v2_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -655,10 +665,17 @@ def test_config_service_v2_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [logging_config.ListBucketsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListBucketsRequest, + dict, + ], +) def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -687,7 +704,8 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -703,7 +721,8 @@ async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -714,7 +733,9 @@ async def test_list_buckets_async( with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListBucketsResponse(next_page_token="next_page_token_value",) + logging_config.ListBucketsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_buckets(request) @@ -734,7 +755,9 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -754,7 +777,10 @@ def test_list_buckets_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -783,11 +809,16 @@ async def test_list_buckets_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_buckets_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -795,7 +826,9 @@ def test_list_buckets_flattened(): call.return_value = logging_config.ListBucketsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_buckets(parent="parent_value",) + client.list_buckets( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -807,13 +840,16 @@ def test_list_buckets_flattened(): def test_list_buckets_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_buckets( - logging_config.ListBucketsRequest(), parent="parent_value", + logging_config.ListBucketsRequest(), + parent="parent_value", ) @@ -833,7 +869,9 @@ async def test_list_buckets_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_buckets(parent="parent_value",) + response = await client.list_buckets( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -854,13 +892,15 @@ async def test_list_buckets_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_buckets( - logging_config.ListBucketsRequest(), parent="parent_value", + logging_config.ListBucketsRequest(), + parent="parent_value", ) def test_list_buckets_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -875,12 +915,21 @@ def test_list_buckets_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) @@ -900,7 +949,8 @@ def test_list_buckets_pager(transport_name: str = "grpc"): def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
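The mechanical reflow that dominates this diff follows directly from the BLACK_VERSION bump in the noxfile.py hunks above (19.10b0 to 22.3.0): black gained the "magic trailing comma" in 20.8b0 and the 22.x line made it part of the stable style, so any call or literal that already ends in a trailing comma is kept exploded, one element per line, instead of being collapsed. A minimal sketch of that behaviour, using black's programmatic entry point and the ClientOptions call from the hunk above; it assumes black 22.3.0 is importable and is only meant to show where the churn comes from:

```python
# Demonstrates the "magic trailing comma" behind this reflow, assuming
# black==22.3.0 is installed. format_str/Mode are black's public API.
import black

src = 'options = client_options.ClientOptions(scopes=["1", "2"],)\n'
print(black.format_str(src, mode=black.Mode()), end="")
# Output:
# options = client_options.ClientOptions(
#     scopes=["1", "2"],
# )
```

Re-running `nox -s blacken` (the session defined in the noxfile hunk above) with the new pin is presumably what produced the rest of the churn in the generated clients and tests.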
@@ -915,12 +965,21 @@ def test_list_buckets_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) @@ -949,16 +1008,27 @@ async def test_list_buckets_async_pager(): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) - async_pager = await client.list_buckets(request={},) + async_pager = await client.list_buckets( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -988,12 +1058,21 @@ async def test_list_buckets_async_pages(): ], next_page_token="abc", ), - logging_config.ListBucketsResponse(buckets=[], next_page_token="def",), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(),], next_page_token="ghi", + buckets=[], + next_page_token="def", + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token="ghi", ), logging_config.ListBucketsResponse( - buckets=[logging_config.LogBucket(), logging_config.LogBucket(),], + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], ), RuntimeError, ) @@ -1004,10 +1083,17 @@ async def test_list_buckets_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetBucketRequest, + dict, + ], +) def test_get_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1046,7 +1132,8 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
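The *_pager and *_pages tests above (and their async twins below) all follow one convention: the transport call is given a side_effect tuple of canned responses whose next_page_token values chain from "abc" through "def" and "ghi" to an empty token, with a trailing RuntimeError that only fires if the pager asks for a page past the last one. A library-free sketch of that convention, where plain dicts and a hand-rolled loop stand in for ListBucketsResponse and the generated pager:

```python
# Library-free sketch of the paging convention used in the pager tests:
# a fixed sequence of fake pages plus a trailing RuntimeError that is only
# raised if the consumer over-fetches.
from unittest import mock

fake_rpc = mock.Mock()
fake_rpc.side_effect = (
    {"buckets": ["b1", "b2", "b3"], "next_page_token": "abc"},
    {"buckets": [], "next_page_token": "def"},
    {"buckets": ["b4"], "next_page_token": "ghi"},
    {"buckets": ["b5", "b6"], "next_page_token": ""},  # empty token: last page
    RuntimeError,  # reached only if the loop asks for one page too many
)

buckets = []
token = None
while True:
    page = fake_rpc(page_token=token)
    buckets.extend(page["buckets"])
    token = page["next_page_token"]
    if not token:
        break

assert buckets == ["b1", "b2", "b3", "b4", "b5", "b6"]
```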
@@ -1062,7 +1149,8 @@ async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1105,7 +1193,9 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1125,7 +1215,10 @@ def test_get_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1154,13 +1247,23 @@ async def test_get_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.CreateBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateBucketRequest, + dict, + ], +) def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1199,7 +1302,8 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1215,7 +1319,8 @@ async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1258,7 +1363,9 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1278,7 +1385,10 @@ def test_create_bucket_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1307,13 +1417,23 @@ async def test_create_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.UpdateBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateBucketRequest, + dict, + ], +) def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1352,7 +1472,8 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1368,7 +1489,8 @@ async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1411,7 +1533,9 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1431,7 +1555,10 @@ def test_update_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1460,13 +1587,23 @@ async def test_update_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.DeleteBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteBucketRequest, + dict, + ], +) def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1492,7 +1629,8 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1508,7 +1646,8 @@ async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1536,7 +1675,9 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1556,7 +1697,10 @@ def test_delete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1583,13 +1727,23 @@ async def test_delete_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.UndeleteBucketRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UndeleteBucketRequest, + dict, + ], +) def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1615,7 +1769,8 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1631,7 +1786,8 @@ async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1659,7 +1815,9 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
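Every *_field_headers test in this file pins the same transport contract: the resource name set on the request must be echoed into the x-goog-request-params metadata entry so the service can route the call. The value is produced by google.api_core's routing-header helper, which, at the api_core versions this generator targets, leaves "/" unescaped; that is why the assertions expect the literal "name=name/value" and "parent=parent/value". A small sketch of that helper (the exact escaping is stated here as an assumption, not verified against every api_core release):

```python
# Sketch of how the x-goog-request-params entry asserted in the
# *_field_headers tests is built. to_grpc_metadata lives in
# google.api_core.gapic_v1.routing_header; the "/" is assumed to stay
# unescaped, matching the literal values the tests expect.
from google.api_core.gapic_v1 import routing_header

key, value = routing_header.to_grpc_metadata((("name", "name/value"),))
assert key == "x-goog-request-params"
assert value == "name=name/value"
```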
@@ -1679,7 +1837,10 @@ def test_undelete_bucket_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1706,13 +1867,23 @@ async def test_undelete_bucket_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.ListViewsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListViewsRequest, + dict, + ], +) def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1741,7 +1912,8 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1757,7 +1929,8 @@ async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1768,7 +1941,9 @@ async def test_list_views_async( with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListViewsResponse(next_page_token="next_page_token_value",) + logging_config.ListViewsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_views(request) @@ -1788,7 +1963,9 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1808,7 +1985,10 @@ def test_list_views_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1837,11 +2017,16 @@ async def test_list_views_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_views_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1849,7 +2034,9 @@ def test_list_views_flattened(): call.return_value = logging_config.ListViewsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_views(parent="parent_value",) + client.list_views( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1861,13 +2048,16 @@ def test_list_views_flattened(): def test_list_views_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_views( - logging_config.ListViewsRequest(), parent="parent_value", + logging_config.ListViewsRequest(), + parent="parent_value", ) @@ -1887,7 +2077,9 @@ async def test_list_views_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_views(parent="parent_value",) + response = await client.list_views( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1908,13 +2100,15 @@ async def test_list_views_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_views( - logging_config.ListViewsRequest(), parent="parent_value", + logging_config.ListViewsRequest(), + parent="parent_value", ) def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1929,12 +2123,21 @@ def test_list_views_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) @@ -1954,7 +2157,8 @@ def test_list_views_pager(transport_name: str = "grpc"): def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1969,12 +2173,21 @@ def test_list_views_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) @@ -2003,16 +2216,27 @@ async def test_list_views_async_pager(): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) - async_pager = await client.list_views(request={},) + async_pager = await client.list_views( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2042,12 +2266,21 @@ async def test_list_views_async_pages(): ], next_page_token="abc", ), - logging_config.ListViewsResponse(views=[], next_page_token="def",), logging_config.ListViewsResponse( - views=[logging_config.LogView(),], next_page_token="ghi", + views=[], + next_page_token="def", + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token="ghi", ), logging_config.ListViewsResponse( - views=[logging_config.LogView(), logging_config.LogView(),], + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], ), RuntimeError, ) @@ -2058,10 +2291,17 @@ async def test_list_views_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetViewRequest, + dict, + ], +) def test_get_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2072,7 +2312,9 @@ def test_get_view(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.get_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name="name_value", description="description_value", filter="filter_value", + name="name_value", + description="description_value", + filter="filter_value", ) response = client.get_view(request) @@ -2092,7 +2334,8 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
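The *_empty_call tests are a narrow coverage guard: invoking a method with neither a request object nor flattened fields must still hand the transport a default request proto. A sketch of that contract against a patched transport; the anonymous credentials are illustrative and nothing leaves the mock:

```python
# Sketch of the zero-argument contract the *_empty_call tests cover: with no
# request and no flattened fields, the client still sends a default request
# proto to the transport. Nothing leaves the patched call.
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client
from google.cloud.logging_v2.types import logging_config

client = ConfigServiceV2Client(
    credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
)

with mock.patch.object(type(client.transport.get_view), "__call__") as call:
    call.return_value = logging_config.LogView(name="name_value")
    client.get_view()
    args, _ = call.call_args
    assert args[0] == logging_config.GetViewRequest()
```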
@@ -2108,7 +2351,8 @@ async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2145,7 +2389,9 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2165,7 +2411,10 @@ def test_get_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2194,13 +2443,23 @@ async def test_get_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.CreateViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateViewRequest, + dict, + ], +) def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2211,7 +2470,9 @@ def test_create_view(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.create_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name="name_value", description="description_value", filter="filter_value", + name="name_value", + description="description_value", + filter="filter_value", ) response = client.create_view(request) @@ -2231,7 +2492,8 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2247,7 +2509,8 @@ async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2284,7 +2547,9 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. @@ -2304,7 +2569,10 @@ def test_create_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2333,13 +2601,23 @@ async def test_create_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.UpdateViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateViewRequest, + dict, + ], +) def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2350,7 +2628,9 @@ def test_update_view(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.update_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( - name="name_value", description="description_value", filter="filter_value", + name="name_value", + description="description_value", + filter="filter_value", ) response = client.update_view(request) @@ -2370,7 +2650,8 @@ def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2386,7 +2667,8 @@ async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2423,7 +2705,9 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2443,7 +2727,10 @@ def test_update_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2472,13 +2759,23 @@ async def test_update_view_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.DeleteViewRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteViewRequest, + dict, + ], +) def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2504,7 +2801,8 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2520,7 +2818,8 @@ async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2548,7 +2847,9 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2568,7 +2869,10 @@ def test_delete_view_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2595,13 +2899,23 @@ async def test_delete_view_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.ListSinksRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListSinksRequest, + dict, + ], +) def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2630,7 +2944,8 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
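The *_async tests drive ConfigServiceV2AsyncClient against a patched transport: the transport callable is replaced and made to return grpc_helpers_async.FakeUnaryUnaryCall wrapping a prepared response, so awaiting the client method resolves without opening a real channel. A condensed, standalone sketch of that pattern; the anonymous credentials and the assertion on the pager's next_page_token mirror the tests above and are assumptions of this sketch, not of the library:

```python
# Condensed sketch of the async mocking pattern used in the *_async tests:
# FakeUnaryUnaryCall (google.api_core.grpc_helpers_async) makes the patched
# transport awaitable, so the async client runs end to end without a real
# gRPC channel. Anonymous credentials are for illustration only.
import asyncio
from unittest import mock

from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials
from google.cloud.logging_v2.services.config_service_v2 import (
    ConfigServiceV2AsyncClient,
)
from google.cloud.logging_v2.types import logging_config


async def main():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials()
    )
    with mock.patch.object(type(client.transport.list_sinks), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging_config.ListSinksResponse(next_page_token="tok")
        )
        pager = await client.list_sinks(request={})
        assert pager.next_page_token == "tok"


asyncio.run(main())
```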
@@ -2646,7 +2961,8 @@ async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2657,7 +2973,9 @@ async def test_list_sinks_async( with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging_config.ListSinksResponse(next_page_token="next_page_token_value",) + logging_config.ListSinksResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_sinks(request) @@ -2677,7 +2995,9 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2697,7 +3017,10 @@ def test_list_sinks_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2726,11 +3049,16 @@ async def test_list_sinks_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_sinks_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2738,7 +3066,9 @@ def test_list_sinks_flattened(): call.return_value = logging_config.ListSinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_sinks(parent="parent_value",) + client.list_sinks( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2750,13 +3080,16 @@ def test_list_sinks_flattened(): def test_list_sinks_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_sinks( - logging_config.ListSinksRequest(), parent="parent_value", + logging_config.ListSinksRequest(), + parent="parent_value", ) @@ -2776,7 +3109,9 @@ async def test_list_sinks_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.list_sinks(parent="parent_value",) + response = await client.list_sinks( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2797,13 +3132,15 @@ async def test_list_sinks_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_sinks( - logging_config.ListSinksRequest(), parent="parent_value", + logging_config.ListSinksRequest(), + parent="parent_value", ) def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2818,12 +3155,21 @@ def test_list_sinks_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) @@ -2843,7 +3189,8 @@ def test_list_sinks_pager(transport_name: str = "grpc"): def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2858,12 +3205,21 @@ def test_list_sinks_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) @@ -2892,16 +3248,27 @@ async def test_list_sinks_async_pager(): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) - async_pager = await client.list_sinks(request={},) + async_pager = await client.list_sinks( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2931,12 +3298,21 @@ async def test_list_sinks_async_pages(): ], next_page_token="abc", ), - logging_config.ListSinksResponse(sinks=[], next_page_token="def",), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(),], next_page_token="ghi", + sinks=[], + next_page_token="def", + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token="ghi", ), logging_config.ListSinksResponse( - sinks=[logging_config.LogSink(), logging_config.LogSink(),], + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], ), RuntimeError, ) @@ -2947,10 +3323,17 @@ async def test_list_sinks_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSinkRequest, + dict, + ], +) def test_get_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2996,7 +3379,8 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3012,7 +3396,8 @@ async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3059,7 +3444,9 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3079,7 +3466,10 @@ def test_get_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3108,11 +3498,16 @@ async def test_get_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] def test_get_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3120,7 +3515,9 @@ def test_get_sink_flattened(): call.return_value = logging_config.LogSink() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_sink(sink_name="sink_name_value",) + client.get_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3132,13 +3529,16 @@ def test_get_sink_flattened(): def test_get_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_sink( - logging_config.GetSinkRequest(), sink_name="sink_name_value", + logging_config.GetSinkRequest(), + sink_name="sink_name_value", ) @@ -3158,7 +3558,9 @@ async def test_get_sink_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_sink(sink_name="sink_name_value",) + response = await client.get_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3179,14 +3581,22 @@ async def test_get_sink_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_sink( - logging_config.GetSinkRequest(), sink_name="sink_name_value", + logging_config.GetSinkRequest(), + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.CreateSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateSinkRequest, + dict, + ], +) def test_create_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3232,7 +3642,8 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3248,7 +3659,8 @@ async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3295,7 +3707,9 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3315,7 +3729,10 @@ def test_create_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3344,11 +3761,16 @@ async def test_create_sink_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3357,7 +3779,8 @@ def test_create_sink_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_sink( - parent="parent_value", sink=logging_config.LogSink(name="name_value"), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3373,7 +3796,9 @@ def test_create_sink_flattened(): def test_create_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3402,7 +3827,8 @@ async def test_create_sink_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_sink( - parent="parent_value", sink=logging_config.LogSink(name="name_value"), + parent="parent_value", + sink=logging_config.LogSink(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -3433,10 +3859,17 @@ async def test_create_sink_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.UpdateSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSinkRequest, + dict, + ], +) def test_update_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3482,7 +3915,8 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3498,7 +3932,8 @@ async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3545,7 +3980,9 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3565,7 +4002,10 @@ def test_update_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3594,11 +4034,16 @@ async def test_update_sink_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] def test_update_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -3628,7 +4073,9 @@ def test_update_sink_flattened(): def test_update_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3695,10 +4142,17 @@ async def test_update_sink_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.DeleteSinkRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteSinkRequest, + dict, + ], +) def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3724,7 +4178,8 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3740,7 +4195,8 @@ async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3768,7 +4224,9 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3788,7 +4246,10 @@ def test_delete_sink_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -3815,11 +4276,16 @@ async def test_delete_sink_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "sink_name=sink_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "sink_name=sink_name/value", + ) in kw["metadata"] def test_delete_sink_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -3827,7 +4293,9 @@ def test_delete_sink_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_sink(sink_name="sink_name_value",) + client.delete_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3839,13 +4307,16 @@ def test_delete_sink_flattened(): def test_delete_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_sink( - logging_config.DeleteSinkRequest(), sink_name="sink_name_value", + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", ) @@ -3863,7 +4334,9 @@ async def test_delete_sink_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_sink(sink_name="sink_name_value",) + response = await client.delete_sink( + sink_name="sink_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3884,14 +4357,22 @@ async def test_delete_sink_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_sink( - logging_config.DeleteSinkRequest(), sink_name="sink_name_value", + logging_config.DeleteSinkRequest(), + sink_name="sink_name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.ListExclusionsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.ListExclusionsRequest, + dict, + ], +) def test_list_exclusions(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3920,7 +4401,8 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3936,7 +4418,8 @@ async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3969,7 +4452,9 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3989,7 +4474,10 @@ def test_list_exclusions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4018,11 +4506,16 @@ async def test_list_exclusions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_exclusions_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4030,7 +4523,9 @@ def test_list_exclusions_flattened(): call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_exclusions(parent="parent_value",) + client.list_exclusions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4042,13 +4537,16 @@ def test_list_exclusions_flattened(): def test_list_exclusions_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_exclusions( - logging_config.ListExclusionsRequest(), parent="parent_value", + logging_config.ListExclusionsRequest(), + parent="parent_value", ) @@ -4068,7 +4566,9 @@ async def test_list_exclusions_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_exclusions(parent="parent_value",) + response = await client.list_exclusions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4089,13 +4589,15 @@ async def test_list_exclusions_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_exclusions( - logging_config.ListExclusionsRequest(), parent="parent_value", + logging_config.ListExclusionsRequest(), + parent="parent_value", ) def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4111,10 +4613,14 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4140,7 +4646,8 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4156,10 +4663,14 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4195,10 +4706,14 @@ async def test_list_exclusions_async_pager(): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4208,7 +4723,9 @@ async def test_list_exclusions_async_pager(): ), RuntimeError, ) - async_pager = await client.list_exclusions(request={},) + async_pager = await client.list_exclusions( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -4239,10 +4756,14 @@ async def test_list_exclusions_async_pages(): next_page_token="abc", ), logging_config.ListExclusionsResponse( - exclusions=[], next_page_token="def", + exclusions=[], + next_page_token="def", ), logging_config.ListExclusionsResponse( - exclusions=[logging_config.LogExclusion(),], next_page_token="ghi", + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token="ghi", ), logging_config.ListExclusionsResponse( exclusions=[ @@ -4259,10 +4780,17 @@ async def test_list_exclusions_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_config.GetExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetExclusionRequest, + dict, + ], +) def test_get_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4297,7 +4825,8 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4313,7 +4842,8 @@ async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4352,7 +4882,9 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4372,7 +4904,10 @@ def test_get_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4401,11 +4936,16 @@ async def test_get_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4413,7 +4953,9 @@ def test_get_exclusion_flattened(): call.return_value = logging_config.LogExclusion() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_exclusion(name="name_value",) + client.get_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4425,13 +4967,16 @@ def test_get_exclusion_flattened(): def test_get_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.get_exclusion( - logging_config.GetExclusionRequest(), name="name_value", + logging_config.GetExclusionRequest(), + name="name_value", ) @@ -4451,7 +4996,9 @@ async def test_get_exclusion_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_exclusion(name="name_value",) + response = await client.get_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -4472,14 +5019,22 @@ async def test_get_exclusion_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_exclusion( - logging_config.GetExclusionRequest(), name="name_value", + logging_config.GetExclusionRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.CreateExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CreateExclusionRequest, + dict, + ], +) def test_create_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4514,7 +5069,8 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4530,7 +5086,8 @@ async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4569,7 +5126,9 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4589,7 +5148,10 @@ def test_create_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4618,11 +5180,16 @@ async def test_create_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -4648,7 +5215,9 @@ def test_create_exclusion_flattened(): def test_create_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4709,10 +5278,17 @@ async def test_create_exclusion_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.UpdateExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateExclusionRequest, + dict, + ], +) def test_update_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4747,7 +5323,8 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4763,7 +5340,8 @@ async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4802,7 +5380,9 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4822,7 +5402,10 @@ def test_update_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -4851,11 +5434,16 @@ async def test_update_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_update_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -4885,7 +5473,9 @@ def test_update_exclusion_flattened(): def test_update_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4952,10 +5542,17 @@ async def test_update_exclusion_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.DeleteExclusionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.DeleteExclusionRequest, + dict, + ], +) def test_delete_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4981,7 +5578,8 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4997,7 +5595,8 @@ async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5025,7 +5624,9 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5045,7 +5646,10 @@ def test_delete_exclusion_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5072,11 +5676,16 @@ async def test_delete_exclusion_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5084,7 +5693,9 @@ def test_delete_exclusion_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_exclusion(name="name_value",) + client.delete_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5096,13 +5707,16 @@ def test_delete_exclusion_flattened(): def test_delete_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_exclusion( - logging_config.DeleteExclusionRequest(), name="name_value", + logging_config.DeleteExclusionRequest(), + name="name_value", ) @@ -5120,7 +5734,9 @@ async def test_delete_exclusion_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_exclusion(name="name_value",) + response = await client.delete_exclusion( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5141,14 +5757,22 @@ async def test_delete_exclusion_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_exclusion( - logging_config.DeleteExclusionRequest(), name="name_value", + logging_config.DeleteExclusionRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.GetCmekSettingsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetCmekSettingsRequest, + dict, + ], +) def test_get_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5183,7 +5807,8 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5201,7 +5826,8 @@ async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5240,7 +5866,9 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5262,7 +5890,10 @@ def test_get_cmek_settings_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5293,15 +5924,23 @@ async def test_get_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.parametrize( - "request_type", [logging_config.UpdateCmekSettingsRequest, dict,] + "request_type", + [ + logging_config.UpdateCmekSettingsRequest, + dict, + ], ) def test_update_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5336,7 +5975,8 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5355,7 +5995,8 @@ async def test_update_cmek_settings_async( request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5394,7 +6035,9 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5416,7 +6059,10 @@ def test_update_cmek_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5447,13 +6093,23 @@ async def test_update_cmek_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [logging_config.GetSettingsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.GetSettingsRequest, + dict, + ], +) def test_get_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5490,7 +6146,8 @@ def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5506,7 +6163,8 @@ async def test_get_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5547,7 +6205,9 @@ async def test_get_settings_async_from_dict(): def test_get_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5567,7 +6227,10 @@ def test_get_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5596,11 +6259,16 @@ async def test_get_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_settings_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: @@ -5608,7 +6276,9 @@ def test_get_settings_flattened(): call.return_value = logging_config.Settings() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_settings(name="name_value",) + client.get_settings( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5620,13 +6290,16 @@ def test_get_settings_flattened(): def test_get_settings_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_settings( - logging_config.GetSettingsRequest(), name="name_value", + logging_config.GetSettingsRequest(), + name="name_value", ) @@ -5646,7 +6319,9 @@ async def test_get_settings_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_settings(name="name_value",) + response = await client.get_settings( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -5667,14 +6342,22 @@ async def test_get_settings_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_settings( - logging_config.GetSettingsRequest(), name="name_value", + logging_config.GetSettingsRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [logging_config.UpdateSettingsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.UpdateSettingsRequest, + dict, + ], +) def test_update_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5711,7 +6394,8 @@ def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5727,7 +6411,8 @@ async def test_update_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5768,7 +6453,9 @@ async def test_update_settings_async_from_dict(): def test_update_settings_field_headers(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -5788,7 +6475,10 @@ def test_update_settings_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -5817,11 +6507,16 @@ async def test_update_settings_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_update_settings_flattened(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: @@ -5847,7 +6542,9 @@ def test_update_settings_flattened(): def test_update_settings_flattened_error(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5908,10 +6605,17 @@ async def test_update_settings_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging_config.CopyLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_config.CopyLogEntriesRequest, + dict, + ], +) def test_copy_log_entries(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5937,7 +6641,8 @@ def test_copy_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5953,7 +6658,8 @@ async def test_copy_log_entries_async( transport: str = "grpc_asyncio", request_type=logging_config.CopyLogEntriesRequest ): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5989,7 +6695,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -6009,7 +6716,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = ConfigServiceV2Client(client_options=options, transport=transport,) + client = ConfigServiceV2Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -6025,7 +6735,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -6070,8 +6781,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.ConfigServiceV2GrpcTransport,) + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConfigServiceV2GrpcTransport, + ) def test_config_service_v2_base_transport_error(): @@ -6146,7 +6862,8 @@ def test_config_service_v2_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -6324,7 +7041,8 @@ def test_config_service_v2_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6336,7 +7054,8 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -6445,12 +7164,16 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): def test_config_service_v2_grpc_lro_client(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client @@ -6458,12 +7181,16 @@ def test_config_service_v2_grpc_lro_client(): def test_config_service_v2_grpc_lro_async_client(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. - assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client @@ -6471,7 +7198,9 @@ def test_config_service_v2_grpc_lro_async_client(): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project,) + expected = "projects/{project}/cmekSettings".format( + project=project, + ) actual = ConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -6492,7 +7221,9 @@ def test_log_bucket_path(): location = "octopus" bucket = "oyster" expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( - project=project, location=location, bucket=bucket, + project=project, + location=location, + bucket=bucket, ) actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) assert expected == actual @@ -6515,7 +7246,8 @@ def test_log_exclusion_path(): project = "winkle" exclusion = "nautilus" expected = "projects/{project}/exclusions/{exclusion}".format( - project=project, exclusion=exclusion, + project=project, + exclusion=exclusion, ) actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) assert expected == actual @@ -6536,7 +7268,10 @@ def test_parse_log_exclusion_path(): def test_log_sink_path(): project = "squid" sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) + expected = "projects/{project}/sinks/{sink}".format( + project=project, + sink=sink, + ) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -6558,8 +7293,13 @@ def test_log_view_path(): location = "nudibranch" bucket = "cuttlefish" view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( - project=project, location=location, bucket=bucket, view=view, + expected = ( + "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( + project=project, + location=location, + bucket=bucket, + view=view, + ) ) actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) assert expected == actual @@ -6581,7 +7321,9 @@ def test_parse_log_view_path(): def test_settings_path(): project = "squid" - expected = "projects/{project}/settings".format(project=project,) + expected = "projects/{project}/settings".format( + project=project, + ) actual = ConfigServiceV2Client.settings_path(project) assert expected == actual @@ -6619,7 +7361,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "oyster" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6637,7 +7381,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "cuttlefish" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6655,7 +7401,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "winkle" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6675,7 +7423,8 @@ def test_common_location_path(): project = "scallop" location = "abalone" expected = 
"projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = ConfigServiceV2Client.common_location_path(project, location) assert expected == actual @@ -6700,7 +7449,8 @@ def test_client_with_default_client_info(): transports.ConfigServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6709,7 +7459,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6717,7 +7468,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e87e1c26d..09cff71ee 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -96,7 +96,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] + "client_class", + [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, + ], ) def test_logging_service_v2_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -138,7 +142,11 @@ def test_logging_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] + "client_class", + [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, + ], ) def test_logging_service_v2_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -512,7 +520,9 @@ def test_logging_service_v2_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -658,10 +668,17 @@ def test_logging_service_v2_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [logging.DeleteLogRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.DeleteLogRequest, + dict, + ], +) def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -687,7 +704,8 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -703,7 +721,8 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -731,7 +750,9 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -751,7 +772,10 @@ def test_delete_log_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "log_name=log_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "log_name=log_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -778,11 +802,16 @@ async def test_delete_log_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "log_name=log_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "log_name=log_name/value", + ) in kw["metadata"] def test_delete_log_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: @@ -790,7 +819,9 @@ def test_delete_log_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_log(log_name="log_name_value",) + client.delete_log( + log_name="log_name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -802,13 +833,16 @@ def test_delete_log_flattened(): def test_delete_log_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_log( - logging.DeleteLogRequest(), log_name="log_name_value", + logging.DeleteLogRequest(), + log_name="log_name_value", ) @@ -826,7 +860,9 @@ async def test_delete_log_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_log(log_name="log_name_value",) + response = await client.delete_log( + log_name="log_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -847,14 +883,22 @@ async def test_delete_log_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_log( - logging.DeleteLogRequest(), log_name="log_name_value", + logging.DeleteLogRequest(), + log_name="log_name_value", ) -@pytest.mark.parametrize("request_type", [logging.WriteLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.WriteLogEntriesRequest, + dict, + ], +) def test_write_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -882,7 +926,8 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -900,7 +945,8 @@ async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -932,7 +978,9 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -968,7 +1016,9 @@ def test_write_log_entries_flattened(): def test_write_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1043,10 +1093,17 @@ async def test_write_log_entries_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [logging.ListLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogEntriesRequest, + dict, + ], +) def test_list_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1075,7 +1132,8 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1091,7 +1149,8 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1102,7 +1161,9 @@ async def test_list_log_entries_async( with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - logging.ListLogEntriesResponse(next_page_token="next_page_token_value",) + logging.ListLogEntriesResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_log_entries(request) @@ -1122,7 +1183,9 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1152,7 +1215,9 @@ def test_list_log_entries_flattened(): def test_list_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1221,7 +1286,8 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1236,12 +1302,21 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) @@ -1258,7 +1333,8 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1273,12 +1349,21 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) @@ -1307,16 +1392,27 @@ async def test_list_log_entries_async_pager(): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) - async_pager = await client.list_log_entries(request={},) + async_pager = await client.list_log_entries( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1346,12 +1442,21 @@ async def test_list_log_entries_async_pages(): ], next_page_token="abc", ), - logging.ListLogEntriesResponse(entries=[], next_page_token="def",), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(),], next_page_token="ghi", + entries=[], + next_page_token="def", + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token="ghi", ), logging.ListLogEntriesResponse( - entries=[log_entry.LogEntry(), log_entry.LogEntry(),], + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], ), RuntimeError, ) @@ -1363,11 +1468,16 @@ async def test_list_log_entries_async_pages(): @pytest.mark.parametrize( - "request_type", [logging.ListMonitoredResourceDescriptorsRequest, dict,] + "request_type", + [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, + ], ) def test_list_monitored_resource_descriptors(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the 
runtime is concerned, @@ -1398,7 +1508,8 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1417,7 +1528,8 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1453,7 +1565,8 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1471,7 +1584,8 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1503,7 +1617,8 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1521,7 +1636,8 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1565,7 +1681,8 @@ async def test_list_monitored_resource_descriptors_async_pager(): next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1581,7 +1698,9 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), RuntimeError, ) - async_pager = await client.list_monitored_resource_descriptors(request={},) + async_pager = await client.list_monitored_resource_descriptors( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1617,7 +1736,8 @@ async def test_list_monitored_resource_descriptors_async_pages(): next_page_token="abc", ), logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], next_page_token="def", + resource_descriptors=[], + next_page_token="def", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ @@ -1642,10 +1762,17 @@ async def test_list_monitored_resource_descriptors_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging.ListLogsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.ListLogsRequest, + dict, + ], +) def test_list_logs(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1656,7 +1783,8 @@ def test_list_logs(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse( - log_names=["log_names_value"], next_page_token="next_page_token_value", + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) response = client.list_logs(request) @@ -1675,7 +1803,8 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1691,7 +1820,8 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1703,7 +1833,8 @@ async def test_list_logs_async( # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogsResponse( - log_names=["log_names_value"], next_page_token="next_page_token_value", + log_names=["log_names_value"], + next_page_token="next_page_token_value", ) ) response = await client.list_logs(request) @@ -1725,7 +1856,9 @@ async def test_list_logs_async_from_dict(): def test_list_logs_field_headers(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1745,7 +1878,10 @@ def test_list_logs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1774,11 +1910,16 @@ async def test_list_logs_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_logs_flattened(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1786,7 +1927,9 @@ def test_list_logs_flattened(): call.return_value = logging.ListLogsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_logs(parent="parent_value",) + client.list_logs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1798,13 +1941,16 @@ def test_list_logs_flattened(): def test_list_logs_flattened_error(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_logs( - logging.ListLogsRequest(), parent="parent_value", + logging.ListLogsRequest(), + parent="parent_value", ) @@ -1824,7 +1970,9 @@ async def test_list_logs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_logs(parent="parent_value",) + response = await client.list_logs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1845,13 +1993,15 @@ async def test_list_logs_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_logs( - logging.ListLogsRequest(), parent="parent_value", + logging.ListLogsRequest(), + parent="parent_value", ) def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1859,11 +2009,29 @@ def test_list_logs_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) @@ -1882,7 +2050,8 @@ def test_list_logs_pager(transport_name: str = "grpc"): def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1890,11 +2059,29 @@ def test_list_logs_pages(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) pages = list(client.list_logs(request={}).pages) @@ -1915,14 +2102,34 @@ async def test_list_logs_async_pager(): # Set the response to a series of pages. call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) - async_pager = await client.list_logs(request={},) + async_pager = await client.list_logs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1945,11 +2152,29 @@ async def test_list_logs_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( logging.ListLogsResponse( - log_names=[str(), str(), str(),], next_page_token="abc", + log_names=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + logging.ListLogsResponse( + log_names=[], + next_page_token="def", + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token="ghi", + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], ), - logging.ListLogsResponse(log_names=[], next_page_token="def",), - logging.ListLogsResponse(log_names=[str(),], next_page_token="ghi",), - logging.ListLogsResponse(log_names=[str(), str(),],), RuntimeError, ) pages = [] @@ -1959,10 +2184,17 @@ async def test_list_logs_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging.TailLogEntriesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging.TailLogEntriesRequest, + dict, + ], +) def test_tail_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1991,7 +2223,8 @@ async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2030,7 +2263,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2050,7 +2284,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = LoggingServiceV2Client(client_options=options, transport=transport,) + client = LoggingServiceV2Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -2066,7 +2303,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2111,8 +2349,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.LoggingServiceV2GrpcTransport,) + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, + ) def test_logging_service_v2_base_transport_error(): @@ -2162,7 +2405,8 @@ def test_logging_service_v2_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2344,7 +2588,8 @@ def test_logging_service_v2_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2356,7 +2601,8 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2466,7 +2712,10 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_path(): project = "squid" log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log,) + expected = "projects/{project}/logs/{log}".format( + project=project, + log=log, + ) actual = LoggingServiceV2Client.log_path(project, log) assert expected == actual @@ -2505,7 +2754,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = LoggingServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2523,7 +2774,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = LoggingServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2541,7 +2794,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = LoggingServiceV2Client.common_project_path(project) assert expected == actual @@ -2561,7 +2816,8 @@ def test_common_location_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = LoggingServiceV2Client.common_location_path(project, location) assert expected == actual @@ -2586,7 +2842,8 @@ def test_client_with_default_client_info(): transports.LoggingServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), 
client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2595,7 +2852,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2603,7 +2861,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e6883889d..5ce917c6e 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -94,7 +94,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] + "client_class", + [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, + ], ) def test_metrics_service_v2_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -136,7 +140,11 @@ def test_metrics_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] + "client_class", + [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, + ], ) def test_metrics_service_v2_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -510,7 +518,9 @@ def test_metrics_service_v2_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -656,10 +666,17 @@ def test_metrics_service_v2_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [logging_metrics.ListLogMetricsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.ListLogMetricsRequest, + dict, + ], +) def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -688,7 +705,8 @@ def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -704,7 +722,8 @@ async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -737,7 +756,9 @@ async def test_list_log_metrics_async_from_dict(): def test_list_log_metrics_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -757,7 +778,10 @@ def test_list_log_metrics_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -786,11 +810,16 @@ async def test_list_log_metrics_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_log_metrics_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -798,7 +827,9 @@ def test_list_log_metrics_flattened(): call.return_value = logging_metrics.ListLogMetricsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_log_metrics(parent="parent_value",) + client.list_log_metrics( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -810,13 +841,16 @@ def test_list_log_metrics_flattened(): def test_list_log_metrics_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), parent="parent_value", + logging_metrics.ListLogMetricsRequest(), + parent="parent_value", ) @@ -836,7 +870,9 @@ async def test_list_log_metrics_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_log_metrics(parent="parent_value",) + response = await client.list_log_metrics( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -857,13 +893,15 @@ async def test_list_log_metrics_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), parent="parent_value", + logging_metrics.ListLogMetricsRequest(), + parent="parent_value", ) def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -878,12 +916,21 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) @@ -903,7 +950,8 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -918,12 +966,21 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) @@ -952,16 +1009,27 @@ async def test_list_log_metrics_async_pager(): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) - async_pager = await client.list_log_metrics(request={},) + async_pager = await client.list_log_metrics( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -991,12 +1059,21 @@ async def test_list_log_metrics_async_pages(): ], next_page_token="abc", ), - logging_metrics.ListLogMetricsResponse(metrics=[], next_page_token="def",), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(),], next_page_token="ghi", + metrics=[], + next_page_token="def", ), logging_metrics.ListLogMetricsResponse( - metrics=[logging_metrics.LogMetric(), 
logging_metrics.LogMetric(),], + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token="ghi", + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], ), RuntimeError, ) @@ -1007,10 +1084,17 @@ async def test_list_log_metrics_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [logging_metrics.GetLogMetricRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + logging_metrics.GetLogMetricRequest, + dict, + ], +) def test_get_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1049,7 +1133,8 @@ def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1065,7 +1150,8 @@ async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1108,7 +1194,9 @@ async def test_get_log_metric_async_from_dict(): def test_get_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1128,7 +1216,10 @@ def test_get_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1157,11 +1248,16 @@ async def test_get_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] def test_get_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: @@ -1169,7 +1265,9 @@ def test_get_log_metric_flattened(): call.return_value = logging_metrics.LogMetric() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_log_metric(metric_name="metric_name_value",) + client.get_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1181,13 +1279,16 @@ def test_get_log_metric_flattened(): def test_get_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_log_metric( - logging_metrics.GetLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.GetLogMetricRequest(), + metric_name="metric_name_value", ) @@ -1207,7 +1308,9 @@ async def test_get_log_metric_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_log_metric(metric_name="metric_name_value",) + response = await client.get_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1228,16 +1331,22 @@ async def test_get_log_metric_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_log_metric( - logging_metrics.GetLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.GetLogMetricRequest(), + metric_name="metric_name_value", ) @pytest.mark.parametrize( - "request_type", [logging_metrics.CreateLogMetricRequest, dict,] + "request_type", + [ + logging_metrics.CreateLogMetricRequest, + dict, + ], ) def test_create_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1278,7 +1387,8 @@ def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1296,7 +1406,8 @@ async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1341,7 +1452,9 @@ async def test_create_log_metric_async_from_dict(): def test_create_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1363,7 +1476,10 @@ def test_create_log_metric_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1394,11 +1510,16 @@ async def test_create_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1409,7 +1530,8 @@ def test_create_log_metric_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_log_metric( - parent="parent_value", metric=logging_metrics.LogMetric(name="name_value"), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -1425,7 +1547,9 @@ def test_create_log_metric_flattened(): def test_create_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1456,7 +1580,8 @@ async def test_create_log_metric_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_log_metric( - parent="parent_value", metric=logging_metrics.LogMetric(name="name_value"), + parent="parent_value", + metric=logging_metrics.LogMetric(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -1488,11 +1613,16 @@ async def test_create_log_metric_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [logging_metrics.UpdateLogMetricRequest, dict,] + "request_type", + [ + logging_metrics.UpdateLogMetricRequest, + dict, + ], ) def test_update_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1533,7 +1663,8 @@ def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1551,7 +1682,8 @@ async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1596,7 +1728,9 @@ async def test_update_log_metric_async_from_dict(): def test_update_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1618,7 +1752,10 @@ def test_update_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1649,11 +1786,16 @@ async def test_update_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] def test_update_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1681,7 +1823,9 @@ def test_update_log_metric_flattened(): def test_update_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1745,11 +1889,16 @@ async def test_update_log_metric_flattened_error_async(): @pytest.mark.parametrize( - "request_type", [logging_metrics.DeleteLogMetricRequest, dict,] + "request_type", + [ + logging_metrics.DeleteLogMetricRequest, + dict, + ], ) def test_delete_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1777,7 +1926,8 @@ def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1795,7 +1945,8 @@ async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1825,7 +1976,9 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1847,7 +2000,10 @@ def test_delete_log_metric_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1876,11 +2032,16 @@ async def test_delete_log_metric_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "metric_name=metric_name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "metric_name=metric_name/value", + ) in kw["metadata"] def test_delete_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1890,7 +2051,9 @@ def test_delete_log_metric_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_log_metric(metric_name="metric_name_value",) + client.delete_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1902,13 +2065,16 @@ def test_delete_log_metric_flattened(): def test_delete_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.DeleteLogMetricRequest(), + metric_name="metric_name_value", ) @@ -1928,7 +2094,9 @@ async def test_delete_log_metric_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_log_metric(metric_name="metric_name_value",) + response = await client.delete_log_metric( + metric_name="metric_name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1949,7 +2117,8 @@ async def test_delete_log_metric_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), metric_name="metric_name_value", + logging_metrics.DeleteLogMetricRequest(), + metric_name="metric_name_value", ) @@ -1960,7 +2129,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -1980,7 +2150,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = MetricsServiceV2Client(client_options=options, transport=transport,) + client = MetricsServiceV2Client( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1996,7 +2169,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2041,8 +2215,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.MetricsServiceV2GrpcTransport,) + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, + ) def test_metrics_service_v2_base_transport_error(): @@ -2091,7 +2270,8 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2273,7 +2453,8 @@ def test_metrics_service_v2_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.MetricsServiceV2GrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2285,7 +2466,8 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2396,7 +2578,8 @@ def test_log_metric_path(): project = "squid" metric = "clam" expected = "projects/{project}/metrics/{metric}".format( - project=project, metric=metric, + project=project, + metric=metric, ) actual = MetricsServiceV2Client.log_metric_path(project, metric) assert expected == actual @@ -2436,7 +2619,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = MetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2454,7 +2639,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = MetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2472,7 +2659,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = MetricsServiceV2Client.common_project_path(project) assert expected == actual @@ -2492,7 +2681,8 @@ def test_common_location_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = MetricsServiceV2Client.common_location_path(project, location) assert expected == actual @@ -2517,7 +2707,8 @@ def test_client_with_default_client_info(): transports.MetricsServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2526,7 +2717,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2534,7 +2726,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 353e7d2f6..0bcde45de 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -100,7 +100,15 @@ def test_minimal_record(self): import logging filter_obj = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) 
record.created = None success = filter_obj.filter(record) @@ -128,7 +136,15 @@ def test_record_with_request(self): import logging filter_obj = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None expected_path = "http://testserver/123" @@ -170,7 +186,15 @@ def test_record_with_traceparent_request(self): import logging filter_obj = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None expected_path = "http://testserver/123" @@ -368,7 +392,17 @@ def test_emit_minimal(self): handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, None, _GLOBAL_RESOURCE, None, None, None, False, None, None,), + ( + record, + None, + _GLOBAL_RESOURCE, + None, + None, + None, + False, + None, + None, + ), ) def test_emit_manual_field_override(self): @@ -437,7 +471,9 @@ def test_emit_with_custom_formatter(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) logFormatter = logging.Formatter(fmt="%(name)s :: %(levelname)s :: %(message)s") handler.setFormatter(logFormatter) @@ -473,7 +509,9 @@ def test_emit_dict(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) message = {"x": "test"} logname = "logname" @@ -506,7 +544,9 @@ def test_emit_w_json_extras(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) message = "message" json_fields = {"hello": "world"} @@ -541,7 +581,9 @@ def test_emit_with_encoded_json(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) logFormatter = logging.Formatter(fmt='{ "x" : "%(name)s" }') handler.setFormatter(logFormatter) @@ -574,7 +616,9 @@ def test_format_with_arguments(self): client = _Client(self.PROJECT) handler = self._make_one( - client, transport=_Transport, resource=_GLOBAL_RESOURCE, + client, + transport=_Transport, + resource=_GLOBAL_RESOURCE, ) message = "name: %s" name_arg = "Daniel" diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 5db098c29..5031748f9 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -90,7 +90,15 @@ def test_format_minimal(self): import json handler = self._make_one() - record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + None, + None, + None, + ) record.created = None expected_payload = { "severity": "INFO", @@ -118,7 +126,15 @@ def test_format_with_quotes(self): handler = self._make_one() message = '"test"' expected_result = '\\"test\\"' - record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) record.created = None 
handler.filter(record) result = handler.format(record) @@ -151,7 +167,15 @@ def test_format_with_line_break(self): handler = self._make_one() message = "test\ntest" expected_result = "test\\ntest" - record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record = logging.LogRecord( + None, + logging.INFO, + None, + None, + message, + None, + None, + ) record.created = None handler.filter(record) result = handler.format(record) @@ -169,7 +193,13 @@ def test_format_with_custom_formatter(self): message = "test" expected_result = "logname :: INFO :: test" record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None, + "logname", + logging.INFO, + None, + None, + message, + None, + None, ) record.created = None handler.filter(record) @@ -187,7 +217,13 @@ def test_dict(self): message = {"x": "test"} expected_result = '"x": "test"' record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None, + "logname", + logging.INFO, + None, + None, + message, + None, + None, ) record.created = None handler.filter(record) @@ -206,7 +242,13 @@ def test_encoded_json(self): handler.setFormatter(logFormatter) expected_result = '"name": "logname"' record = logging.LogRecord( - "logname", logging.INFO, None, None, None, None, None, + "logname", + logging.INFO, + None, + None, + None, + None, + None, ) record.created = None handler.filter(record) @@ -225,7 +267,13 @@ def test_format_with_arguments(self): name_arg = "Daniel" expected_result = "name: Daniel" record = logging.LogRecord( - None, logging.INFO, None, None, message, name_arg, None, + None, + logging.INFO, + None, + None, + message, + name_arg, + None, ) record.created = None handler.filter(record) @@ -375,7 +423,13 @@ def test_format_with_json_fields(self): expected_result = "name: Daniel" json_fields = {"hello": "world", "number": 12} record = logging.LogRecord( - None, logging.INFO, None, None, message, name_arg, None, + None, + logging.INFO, + None, + None, + message, + name_arg, + None, ) record.created = None setattr(record, "json_fields", json_fields) diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index 0c547d736..07e1a7e66 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -64,7 +64,9 @@ def test_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE) transport.worker.enqueue.assert_called_once_with( - record, message, resource=_GLOBAL_RESOURCE, + record, + message, + resource=_GLOBAL_RESOURCE, ) def test_trace_send(self): @@ -86,7 +88,10 @@ def test_trace_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, trace=trace) transport.worker.enqueue.assert_called_once_with( - record, message, resource=_GLOBAL_RESOURCE, trace=trace, + record, + message, + resource=_GLOBAL_RESOURCE, + trace=trace, ) def test_span_send(self): @@ -108,7 +113,10 @@ def test_span_send(self): transport.send(record, message, resource=_GLOBAL_RESOURCE, span_id=span_id) transport.worker.enqueue.assert_called_once_with( - record, message, resource=_GLOBAL_RESOURCE, span_id=span_id, + record, + message, + resource=_GLOBAL_RESOURCE, + span_id=span_id, ) def test_flush(self): diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index d8c4bf57e..127c856b4 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -215,7 +215,9 @@ def test_list_sinks(self): ) as call: 
call.return_value = logging_v2.types.ListSinksResponse(sinks=[sink_msg]) - result = client.list_sinks(self.PARENT_PATH,) + result = client.list_sinks( + self.PARENT_PATH, + ) sinks = list(result) diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 1eae1cda6..597313824 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -443,8 +443,8 @@ def test_log_w_dict_resource(self): def test_log_lowercase_severity(self): """ - lower case severity strings should be accepted - """ + lower case severity strings should be accepted + """ from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, ) @@ -785,7 +785,10 @@ def test_list_entries_explicit(self): }, ) # verify that default filter is 24 hours - LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) + LOG_FILTER = "logName=projects/%s/logs/%s" % ( + self.PROJECT, + self.LOGGER_NAME, + ) combined_filter = ( INPUT_FILTER + " AND " @@ -826,7 +829,10 @@ def test_list_entries_explicit_timestamp(self): self.assertEqual(len(entries), 0) # self.assertEqual(client._listed, LISTED) # check call payload - LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) + LOG_FILTER = "logName=projects/%s/logs/%s" % ( + self.PROJECT, + self.LOGGER_NAME, + ) combined_filter = INPUT_FILTER + " AND " + LOG_FILTER self.assertEqual( client._connection._called_with, @@ -958,7 +964,9 @@ def test_list_entries_folder(self): returned = {"entries": ENTRIES} client._connection = _Connection(returned) - iterator = client.list_entries(resource_names=[f"folder/{FOLDER_ID}"],) + iterator = client.list_entries( + resource_names=[f"folder/{FOLDER_ID}"], + ) entries = list(iterator) # Check the entries. self.assertEqual(len(entries), 1) From 7427a41c986aed809a92cd7cb02a038002a80fe7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 18:09:28 +0200 Subject: [PATCH 17/36] chore(deps): update dependency google-cloud-bigquery to v2.34.3 (#514) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index b234d190b..cc4d87623 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 -google-cloud-bigquery==2.34.2 +google-cloud-bigquery==2.34.3 google-cloud-storage==2.2.1 google-cloud-pubsub==2.11.0 From 804a6cabdfaf91d5f056875c8b5ba070dc5307f2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 17:34:20 +0000 Subject: [PATCH 18/36] chore(python): add E231 to .flake8 ignore list (#516) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.flake8 b/.flake8 index 1a7e4989e..7f85294c9 100644 --- a/.flake8 +++ b/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude environment test code. 
tests/environment/** diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 87dd00611..9e0a9356b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From 78de66b23bd9b355ef5e20b13a662eb0842d4d35 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 00:22:15 +0000 Subject: [PATCH 19/36] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#521) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d --- .github/.OwlBot.lock.yaml | 3 ++- .pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9e0a9356b..22cc254af 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62eb5a77d..46d237160 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From f7b9e1829eeff62c6eef4e49d3e4a5c75c4c22e7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 02:22:15 +0000 Subject: [PATCH 20/36] chore(python): Enable size-label bot (#523) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 5 ----- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 22cc254af..58a0b153b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 1e4706499..09c8d735b 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,7 +1,2 @@ -product: true requestsize: enabled: true -staleness: - pullrequest: true - old: 30 - extraold: 60 From e77454d8c37b41abfe3f7a04f4a421c5940125dc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:36:11 +0000 Subject: [PATCH 21/36] chore(python): refactor unit / system test dependency install (#525) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- .github/.OwlBot.lock.yaml | 4 +- noxfile.py | 127 ++++++++++++++++++++++++++++---------- 2 files changed, 96 insertions(+), 35 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 58a0b153b..fa5762290 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/noxfile.py b/noxfile.py index d2f8f0e56..49f754c74 100644 --- a/noxfile.py +++ b/noxfile.py @@ -20,16 +20,49 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [ + "flask", + "webob", + "django", +] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ + "google-cloud-bigquery", + "google-cloud-pubsub", + "google-cloud-storage", + "google-cloud-testutils", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -81,26 +114,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - session.install("flask", "-c", constraints_path) - session.install("webob", "-c", constraints_path) - session.install("django", "-c", constraints_path) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -124,6 +172,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -146,23 +223,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install( - "mock", - "pytest", - "google-cloud-testutils", - "google-cloud-bigquery", - "google-cloud-pubsub", - "google-cloud-storage", - "google-cloud-testutils", - "-c", - constraints_path, - ) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: From fc7275661f1ebbd0205268d59422ac6e311dcfb2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Apr 2022 05:11:23 -0400 Subject: [PATCH 22/36] chore: allow releases on previous majors (#512) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: allow releases on previous majors * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .github/release-please.yml | 12 ++++++++++++ owlbot.py | 2 ++ 2 files changed, 14 insertions(+) diff --git a/.github/release-please.yml b/.github/release-please.yml index 466597e5b..5161ab347 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,2 +1,14 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v2 + handleGHRelease: true + releaseType: python +- branch: v1 + handleGHRelease: true + releaseType: python +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/owlbot.py b/owlbot.py index b266ed13f..f78036e69 100644 --- a/owlbot.py +++ b/owlbot.py @@ -103,5 +103,7 @@ python.py_samples() +python.configure_previous_major_version_branches() + s.shell.run(["nox", "-s", "blacken"], hide_output=False) From b76f09bcf1039bb9c8eaa6ca7488ea889c3d43e6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 07:14:13 -0400 Subject: [PATCH 23/36] chore(python): add license header to auto-label.yaml (#526) Source-Link: https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-label.yaml | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index fa5762290..bc893c979 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 09c8d735b..41bff0b53 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
requestsize: enabled: true From 383f2f0062d3703dfc7e2c331562fb88327cdf38 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Fri, 8 Apr 2022 14:39:35 -0600 Subject: [PATCH 24/36] deps: Pin jinja2 version to fix CI (#522) * deps: Pin jinja2 version to fix CI * deps: Pin werkzeug version in sub-repo Co-authored-by: Anthonios Partheniou --- tests/environment | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/environment b/tests/environment index 21f1ea63a..fd113e1b4 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 21f1ea63a567dfd1b601f7cb8ee6177c77f82cc5 +Subproject commit fd113e1b444b823a62f0e55eecc14a8dc34f26ee From c8d58043f9c6df321cbbf48fa7e9a9fc9720d21a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 Apr 2022 02:02:23 +0200 Subject: [PATCH 25/36] chore(deps): update dependency google-cloud-bigquery to v3 (#520) Co-authored-by: Anthonios Partheniou --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index cc4d87623..541b22a36 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 -google-cloud-bigquery==2.34.3 +google-cloud-bigquery==3.0.1 google-cloud-storage==2.2.1 google-cloud-pubsub==2.11.0 From 22a9e8769fe258bdfd70802098afdb672d448493 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Apr 2022 07:43:57 -0400 Subject: [PATCH 26/36] chore: use gapic-generator-python 0.65.1 (#534) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 20 +- .../services/config_service_v2/client.py | 20 +- .../config_service_v2/transports/base.py | 5 + .../config_service_v2/transports/grpc.py | 4 + .../logging_service_v2/async_client.py | 11 +- .../services/logging_service_v2/client.py | 22 +- .../logging_service_v2/transports/base.py | 5 + .../logging_service_v2/transports/grpc.py | 4 + .../metrics_service_v2/async_client.py | 2 +- .../services/metrics_service_v2/client.py | 2 +- .../metrics_service_v2/transports/base.py | 5 + .../metrics_service_v2/transports/grpc.py | 4 + google/cloud/logging_v2/types/log_entry.py | 2 +- google/cloud/logging_v2/types/logging.py | 4 +- .../cloud/logging_v2/types/logging_metrics.py | 2 +- .../snippet_metadata_logging_v2.json | 2956 ++++++++++++++++- .../logging_v2/test_config_service_v2.py | 103 +- .../logging_v2/test_logging_service_v2.py | 95 +- .../logging_v2/test_metrics_service_v2.py | 85 +- 19 files changed, 3075 insertions(+), 276 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 916fbd18b..808766079 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ 
b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -421,7 +421,6 @@ async def create_bucket( entries. After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -506,7 +505,6 @@ async def update_bucket( After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -584,7 +582,6 @@ async def delete_bucket( purged and all log entries in the bucket will be permanently deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -647,7 +644,6 @@ async def undelete_bucket( deleted can be undeleted within the grace period of 7 days. - .. code-block:: python from google.cloud import logging_v2 @@ -890,7 +886,6 @@ async def create_view( r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. - .. code-block:: python from google.cloud import logging_v2 @@ -968,7 +963,6 @@ async def update_view( indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1044,7 +1038,6 @@ async def delete_view( can delete the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1359,7 +1352,6 @@ async def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. - .. code-block:: python from google.cloud import logging_v2 @@ -1492,7 +1484,6 @@ async def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. - .. code-block:: python from google.cloud import logging_v2 @@ -1657,7 +1648,6 @@ async def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -1765,7 +1755,6 @@ async def list_exclusions( r"""Lists all the exclusions on the \_Default sink in a parent resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2013,7 +2002,6 @@ async def create_exclusion( parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2143,7 +2131,6 @@ async def update_exclusion( r"""Changes one or more properties of an existing exclusion in the \_Default sink. - .. code-block:: python from google.cloud import logging_v2 @@ -2396,7 +2383,6 @@ async def get_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2497,7 +2483,6 @@ async def update_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2595,7 +2580,6 @@ async def get_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2730,7 +2714,6 @@ async def update_settings( Router `__ for more information. - .. 
code-block:: python from google.cloud import logging_v2 @@ -2849,7 +2832,6 @@ async def copy_log_entries( r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. - .. code-block:: python from google.cloud import logging_v2 diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index d14ea70da..35e511abd 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -720,7 +720,6 @@ def create_bucket( entries. After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -806,7 +805,6 @@ def update_bucket( After a bucket has been created, the bucket's location cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -885,7 +883,6 @@ def delete_bucket( purged and all log entries in the bucket will be permanently deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -949,7 +946,6 @@ def undelete_bucket( deleted can be undeleted within the grace period of 7 days. - .. code-block:: python from google.cloud import logging_v2 @@ -1194,7 +1190,6 @@ def create_view( r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. - .. code-block:: python from google.cloud import logging_v2 @@ -1273,7 +1268,6 @@ def update_view( indicates that system is not in a state where it can update the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1350,7 +1344,6 @@ def delete_view( can delete the view. If this occurs, please try again in a few minutes. - .. code-block:: python from google.cloud import logging_v2 @@ -1644,7 +1637,6 @@ def create_sink( permitted to write to the destination. A sink can export log entries only from the resource owning the sink. - .. code-block:: python from google.cloud import logging_v2 @@ -1777,7 +1769,6 @@ def update_sink( The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. - .. code-block:: python from google.cloud import logging_v2 @@ -1931,7 +1922,6 @@ def delete_sink( r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -2028,7 +2018,6 @@ def list_exclusions( r"""Lists all the exclusions on the \_Default sink in a parent resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2254,7 +2243,6 @@ def create_exclusion( parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2384,7 +2372,6 @@ def update_exclusion( r"""Changes one or more properties of an existing exclusion in the \_Default sink. - .. code-block:: python from google.cloud import logging_v2 @@ -2626,7 +2613,6 @@ def get_cmek_settings( Router `__ for more information. - .. 
code-block:: python from google.cloud import logging_v2 @@ -2728,7 +2714,6 @@ def update_cmek_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2827,7 +2812,6 @@ def get_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2962,7 +2946,6 @@ def update_settings( Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -3081,7 +3064,6 @@ def copy_log_entries( r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. - .. code-block:: python from google.cloud import logging_v2 diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 95de06d1a..685f174b4 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -88,6 +88,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -597,5 +598,9 @@ def copy_log_entries( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("ConfigServiceV2Transport",) diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 228f1c9a3..25de48851 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1023,5 +1023,9 @@ def copy_log_entries( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("ConfigServiceV2GrpcTransport",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 7973d4395..ec78309a6 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Mapping, Optional, AsyncIterable, Awaitable, @@ -229,7 +230,6 @@ async def delete_log( deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -329,7 +329,7 @@ async def write_log_entries( *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + labels: Mapping[str, str] = None, entries: Sequence[log_entry.LogEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -343,7 +343,6 @@ async def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) - .. code-block:: python from google.cloud import logging_v2 @@ -411,7 +410,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): + labels (:class:`Mapping[str, str]`): Optional. 
Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -535,7 +534,6 @@ async def list_log_entries( For ways to export log entries, see `Exporting Logs `__. - .. code-block:: python from google.cloud import logging_v2 @@ -693,7 +691,6 @@ async def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. - .. code-block:: python from google.cloud import logging_v2 @@ -787,7 +784,6 @@ async def list_logs( or billing accounts. Only logs that have entries are listed. - .. code-block:: python from google.cloud import logging_v2 @@ -910,7 +906,6 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. - .. code-block:: python from google.cloud import logging_v2 diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 8638cfb10..47c5bfe82 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core import client_options as client_options_lib @@ -438,7 +448,6 @@ def delete_log( deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -527,7 +536,7 @@ def write_log_entries( *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + labels: Mapping[str, str] = None, entries: Sequence[log_entry.LogEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -541,7 +550,6 @@ def write_log_entries( maximum of 1000 different resources (projects, organizations, billing accounts or folders) - .. code-block:: python from google.cloud import logging_v2 @@ -609,7 +617,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + labels (Mapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -721,7 +729,6 @@ def list_log_entries( For ways to export log entries, see `Exporting Logs `__. - .. code-block:: python from google.cloud import logging_v2 @@ -868,7 +875,6 @@ def list_monitored_resource_descriptors( r"""Lists the descriptors for monitored resource types used by Logging. - .. code-block:: python from google.cloud import logging_v2 @@ -954,7 +960,6 @@ def list_logs( or billing accounts. Only logs that have entries are listed. - .. code-block:: python from google.cloud import logging_v2 @@ -1066,7 +1071,6 @@ def tail_log_entries( Until the stream is terminated, it will continue reading logs. - .. 
code-block:: python from google.cloud import logging_v2 diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 716a2fbbc..ceefeda8a 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -87,6 +87,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -296,5 +297,9 @@ def tail_log_entries( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("LoggingServiceV2Transport",) diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 176d4475f..22affa06b 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -409,5 +409,9 @@ def tail_log_entries( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("LoggingServiceV2GrpcTransport",) diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index af6265e82..e9b59bf30 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index bb2221b85..df5d4d2fc 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index cc483aeff..eae5f5da7 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -87,6 +87,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -255,5 +256,9 @@ def delete_log_metric( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("MetricsServiceV2Transport",) diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 6c1fd9b73..12d70452f 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -363,5 +363,9 @@ def delete_log_metric( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("MetricsServiceV2GrpcTransport",) diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index 2bdea1b73..cc3469428 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -144,7 +144,7 @@ class LogEntry(proto.Message): http_request (google.logging.type.http_request_pb2.HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. - labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): + labels (Mapping[str, str]): Optional. A map of key, value pairs that provides additional information about the log entry. The labels can be user-defined or system-defined. diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index 383a4ef77..42bb9dbb8 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -103,7 +103,7 @@ class WriteLogEntriesRequest(proto.Message): "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + labels (Mapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this @@ -192,7 +192,7 @@ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. Attributes: - log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + log_entry_errors (Mapping[int, google.rpc.status_pb2.Status]): When ``WriteLogEntriesRequest.partial_success`` is true, records the error status for entries that were not written due to a permanent error, keyed by the entry's zero-based diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index 323599423..bcad752b3 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -128,7 +128,7 @@ class LogMetric(proto.Message): Example: ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]): + label_extractors (Mapping[str, str]): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. 
Each label key diff --git a/samples/generated_samples/snippet_metadata_logging_v2.json b/samples/generated_samples/snippet_metadata_logging_v2.json index b6ad799b1..657563cd6 100644 --- a/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/samples/generated_samples/snippet_metadata_logging_v2.json @@ -1,16 +1,57 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.logging.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-logging" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.copy_log_entries", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CopyLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "copy_log_entries" }, + "description": "Sample for CopyLogEntries", "file": "logging_v2_generated_config_service_v2_copy_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", "segments": [ { @@ -43,18 +84,50 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_copy_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.copy_log_entries", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CopyLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "copy_log_entries" }, + "description": "Sample for CopyLogEntries", "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", "segments": [ { @@ -87,19 +160,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateBucket" - } + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, + "description": "Sample for CreateBucket", "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { @@ -132,18 +237,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, + "description": "Sample for CreateBucket", "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { @@ -176,19 +313,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, + "description": "Sample for CreateExclusion", "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { @@ -221,18 +398,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + 
"fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, + "description": "Sample for CreateExclusion", "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { @@ -265,19 +482,59 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" }, + "description": "Sample for CreateSink", "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", "segments": [ { @@ -310,18 +567,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", 
+ "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" }, + "description": "Sample for CreateSink", "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", "segments": [ { @@ -354,19 +651,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" }, + "description": "Sample for CreateView", "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", "segments": [ { @@ -399,18 +728,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" }, + "description": "Sample for CreateView", "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", "segments": [ { @@ -443,19 +804,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" }, + "description": "Sample for DeleteBucket", "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { @@ -486,18 +878,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" }, + "description": "Sample for DeleteBucket", "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { @@ -528,19 +951,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" }, + "description": "Sample for DeleteExclusion", "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", "segments": [ { @@ -571,18 +1029,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" }, + "description": "Sample for DeleteExclusion", "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { @@ -613,19 +1106,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" }, + "description": "Sample for DeleteSink", "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { @@ -656,18 +1184,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" }, + "description": "Sample for DeleteSink", "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { @@ -698,19 +1261,50 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": 
"ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" }, + "description": "Sample for DeleteView", "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { @@ -741,18 +1335,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" }, + "description": "Sample for DeleteView", "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { @@ -783,19 +1408,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, + "description": "Sample for GetBucket", "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { @@ -828,18 +1485,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, + "description": "Sample for GetBucket", "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { @@ -872,19 +1561,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, + "description": "Sample for GetCmekSettings", "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { @@ -917,18 +1638,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, + "description": "Sample for GetCmekSettings", "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { @@ -961,19 +1714,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, + "description": "Sample for GetExclusion", "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { @@ -1006,18 +1795,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, + "description": "Sample for GetExclusion", "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { @@ -1050,19 +1875,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, + "description": "Sample for GetSettings", "file": "logging_v2_generated_config_service_v2_get_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { @@ -1095,18 +1956,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, + "description": "Sample for GetSettings", "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { @@ -1139,19 +2036,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, + "description": "Sample for GetSink", "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { @@ -1184,18 +2117,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, + "description": "Sample for GetSink", "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { @@ -1228,19 +2197,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, + "description": "Sample for GetView", "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { @@ -1273,18 +2274,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, + "description": "Sample for GetView", "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { @@ -1317,19 +2350,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + 
"fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListBuckets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, + "description": "Sample for ListBuckets", "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { @@ -1362,18 +2431,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListBuckets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, + "description": "Sample for ListBuckets", "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { @@ -1406,19 +2511,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListExclusions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, + "description": "Sample for ListExclusions", "file": 
"logging_v2_generated_config_service_v2_list_exclusions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { @@ -1451,18 +2592,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListExclusions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, + "description": "Sample for ListExclusions", "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { @@ -1495,19 +2672,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListSinks" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", + "shortName": "list_sinks" }, + "description": "Sample for ListSinks", "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { @@ -1540,18 +2753,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListSinks" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", + "shortName": "list_sinks" }, + "description": "Sample for ListSinks", "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { @@ -1584,19 +2833,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListViews" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", + "shortName": "list_views" }, + "description": "Sample for ListViews", "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { @@ -1629,18 +2914,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_views_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListViews" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", + "shortName": "list_views" }, + "description": "Sample for ListViews", "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { @@ -1673,19 +2994,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_views_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UndeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "undelete_bucket" }, + "description": "Sample for UndeleteBucket", "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { @@ -1716,18 +3068,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UndeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "undelete_bucket" }, + "description": "Sample for UndeleteBucket", "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { @@ -1758,19 +3141,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, + "description": "Sample for UpdateBucket", "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { @@ -1803,18 +3218,50 @@ "start": 42, "type": 
"RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, + "description": "Sample for UpdateBucket", "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { @@ -1847,19 +3294,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "update_cmek_settings" }, + "description": "Sample for UpdateCmekSettings", "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", "segments": [ { @@ -1892,18 +3371,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": 
"update_cmek_settings" }, + "description": "Sample for UpdateCmekSettings", "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", "segments": [ { @@ -1936,19 +3447,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "update_exclusion" }, + "description": "Sample for UpdateExclusion", "file": "logging_v2_generated_config_service_v2_update_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", "segments": [ { @@ -1981,18 +3536,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "update_exclusion" }, + "description": "Sample for UpdateExclusion", "file": "logging_v2_generated_config_service_v2_update_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { @@ -2025,19 +3624,59 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + 
"client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" }, + "description": "Sample for UpdateSettings", "file": "logging_v2_generated_config_service_v2_update_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", "segments": [ { @@ -2070,18 +3709,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" }, + "description": "Sample for UpdateSettings", "file": "logging_v2_generated_config_service_v2_update_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", "segments": [ { @@ -2114,19 +3793,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + 
}, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "update_sink" }, + "description": "Sample for UpdateSink", "file": "logging_v2_generated_config_service_v2_update_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", "segments": [ { @@ -2159,18 +3882,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "update_sink" }, + "description": "Sample for UpdateSink", "file": "logging_v2_generated_config_service_v2_update_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", "segments": [ { @@ -2203,19 +3970,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "update_view" }, + "description": "Sample for UpdateView", "file": "logging_v2_generated_config_service_v2_update_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", "segments": [ { @@ -2248,18 +4047,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "update_view" }, + "description": "Sample for UpdateView", "file": "logging_v2_generated_config_service_v2_update_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", "segments": [ { @@ -2292,19 +4123,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.delete_log", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "DeleteLog" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log" }, + "description": "Sample for DeleteLog", "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", "segments": [ { @@ -2335,18 +4201,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.delete_log", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "DeleteLog" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log" }, + "description": "Sample for DeleteLog", "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", "segments": [ { @@ -2377,19 +4278,63 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + 
], + "title": "logging_v2_generated_logging_service_v2_delete_log_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "Sequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", + "shortName": "list_log_entries" }, + "description": "Sample for ListLogEntries", "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", "segments": [ { @@ -2422,18 +4367,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "Sequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", + "shortName": "list_log_entries" }, + "description": "Sample for ListLogEntries", "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", "segments": [ { @@ -2466,19 +4455,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_logs", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogs" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", + "shortName": "list_logs" }, + "description": "Sample for ListLogs", "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", "segments": [ { @@ -2511,18 +4536,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_logs", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", + "shortName": "list_logs" }, + "description": "Sample for ListLogs", "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", "segments": [ { @@ -2555,19 +4616,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_monitored_resource_descriptors", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListMonitoredResourceDescriptors" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", + "shortName": "list_monitored_resource_descriptors" }, + "description": "Sample for ListMonitoredResourceDescriptors", "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { @@ -2600,18 +4693,50 
@@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_monitored_resource_descriptors", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListMonitoredResourceDescriptors" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", + "shortName": "list_monitored_resource_descriptors" }, + "description": "Sample for ListMonitoredResourceDescriptors", "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { @@ -2644,19 +4769,51 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.tail_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "TailLogEntries" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" }, + "description": "Sample for TailLogEntries", "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", "segments": [ { @@ -2689,18 +4846,50 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.tail_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "TailLogEntries" - } + }, + "parameters": [ + { + "name": "requests", + "type": 
"Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" }, + "description": "Sample for TailLogEntries", "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", "segments": [ { @@ -2733,19 +4922,67 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.write_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "WriteLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "Mapping[str, str]" + }, + { + "name": "entries", + "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" }, + "description": "Sample for WriteLogEntries", "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", "segments": [ { @@ -2778,18 +5015,66 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.write_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "WriteLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "Mapping[str, str]" + }, + { + "name": "entries", + "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" }, + "description": "Sample for WriteLogEntries", "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", "segments": [ { @@ -2822,19 +5107,59 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.create_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "CreateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "create_log_metric" }, + "description": "Sample for CreateLogMetric", "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", "segments": [ { @@ -2867,18 +5192,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.create_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "CreateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "create_log_metric" }, + "description": "Sample for CreateLogMetric", "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { @@ -2911,19 +5276,54 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": 
"MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.delete_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "DeleteLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log_metric" }, + "description": "Sample for DeleteLogMetric", "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { @@ -2954,18 +5354,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.delete_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "DeleteLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log_metric" }, + "description": "Sample for DeleteLogMetric", "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { @@ -2996,19 +5431,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.get_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "GetLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "get_log_metric" }, + "description": "Sample for GetLogMetric", "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_MetricsServiceV2_GetLogMetric_async", "segments": [ { @@ -3041,18 +5512,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.get_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "GetLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "get_log_metric" }, + "description": "Sample for GetLogMetric", "file": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", "segments": [ { @@ -3085,19 +5592,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.list_log_metrics", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "ListLogMetrics" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", + "shortName": "list_log_metrics" }, + "description": "Sample for ListLogMetrics", "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", "segments": [ { @@ -3130,18 +5673,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.list_log_metrics", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "ListLogMetrics" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", + "shortName": "list_log_metrics" }, + "description": "Sample for ListLogMetrics", "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { @@ -3174,19 +5753,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.update_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "UpdateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "update_log_metric" }, + "description": "Sample for UpdateLogMetric", "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { @@ -3219,18 +5838,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.update_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "UpdateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "update_log_metric" }, + "description": "Sample for UpdateLogMetric", "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { @@ -3263,7 +5922,8 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + 
], + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py" } ] } diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 75227b5b3..18982084f 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -94,24 +94,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_config_service_v2_client_from_service_account_info(client_class): +def test_config_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") @pytest.mark.parametrize( @@ -140,27 +142,33 @@ def test_config_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_config_service_v2_client_from_service_account_file(client_class): +def test_config_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") def test_config_service_v2_client_get_transport_class(): @@ -1031,7 +1039,7 @@ async def test_list_buckets_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1077,7 +1085,9 @@ async def test_list_buckets_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_buckets(request={})).pages: + async for page_ in ( + await client.list_buckets(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2239,7 +2249,7 @@ async def test_list_views_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for 
response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2285,7 +2295,9 @@ async def test_list_views_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_views(request={})).pages: + async for page_ in ( + await client.list_views(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -3271,7 +3283,7 @@ async def test_list_sinks_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -3317,7 +3329,9 @@ async def test_list_sinks_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_sinks(request={})).pages: + async for page_ in ( + await client.list_sinks(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -4728,7 +4742,7 @@ async def test_list_exclusions_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -4774,7 +4788,9 @@ async def test_list_exclusions_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_exclusions(request={})).pages: + async for page_ in ( + await client.list_exclusions(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -6779,6 +6795,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = ConfigServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = ConfigServiceV2Client( @@ -6851,6 +6880,14 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -7016,24 +7053,40 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_ ) -def test_config_service_v2_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") -def test_config_service_v2_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:8000" + assert client.transport._host == ("logging.googleapis.com:8000") def test_config_service_v2_grpc_transport_channel(): diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 09cff71ee..1f74ac5b2 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -96,24 +96,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_logging_service_v2_client_from_service_account_info(client_class): +def test_logging_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") @pytest.mark.parametrize( @@ -142,27 +144,33 @@ def test_logging_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_logging_service_v2_client_from_service_account_file(client_class): +def test_logging_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( 
service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") def test_logging_service_v2_client_get_transport_class(): @@ -1415,7 +1423,7 @@ async def test_list_log_entries_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1461,7 +1469,9 @@ async def test_list_log_entries_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_entries(request={})).pages: + async for page_ in ( + await client.list_log_entries(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1703,7 +1713,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1756,7 +1766,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): pages = [] async for page_ in ( await client.list_monitored_resource_descriptors(request={}) - ).pages: + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2132,7 +2142,7 @@ async def test_list_logs_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2178,7 +2188,9 @@ async def test_list_logs_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_logs(request={})).pages: + async for page_ in ( + await client.list_logs(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2347,6 +2359,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = LoggingServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = LoggingServiceV2Client( @@ -2394,6 +2419,14 @@ def test_logging_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2563,24 +2596,40 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport ) -def test_logging_service_v2_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") -def test_logging_service_v2_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:8000" + assert client.transport._host == ("logging.googleapis.com:8000") def test_logging_service_v2_grpc_transport_channel(): diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 5ce917c6e..53ced9ce4 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -94,24 +94,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_metrics_service_v2_client_from_service_account_info(client_class): +def test_metrics_service_v2_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") @pytest.mark.parametrize( @@ -140,27 +142,33 @@ def test_metrics_service_v2_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), ], ) -def test_metrics_service_v2_client_from_service_account_file(client_class): +def test_metrics_service_v2_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( 
service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") def test_metrics_service_v2_client_get_transport_class(): @@ -1032,7 +1040,7 @@ async def test_list_log_metrics_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1078,7 +1086,9 @@ async def test_list_log_metrics_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_metrics(request={})).pages: + async for page_ in ( + await client.list_log_metrics(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2213,6 +2223,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = MetricsServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = MetricsServiceV2Client( @@ -2259,6 +2282,14 @@ def test_metrics_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2428,24 +2459,40 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport ) -def test_metrics_service_v2_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:443" + assert client.transport._host == ("logging.googleapis.com:443") -def test_metrics_service_v2_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "logging.googleapis.com:8000" + assert client.transport._host == ("logging.googleapis.com:8000") def test_metrics_service_v2_grpc_transport_channel(): From 1595e4203faeb3d46b28a7d98f68761998e3aa26 Mon Sep 17 00:00:00 2001 From: losalex <90795544+losalex@users.noreply.github.com> Date: Sat, 16 Apr 2022 01:00:33 -0500 Subject: [PATCH 27/36] fix: Reenable staleness bot (#535) * fix: Reenable staleness bot Reenable staleness bot on the repo * Update auto-label.yaml --- .github/auto-label.yaml | 4 ++++ owlbot.py | 1 + 2 files changed, 5 insertions(+) diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml index 41bff0b53..ccad49b4e 100644 --- a/.github/auto-label.yaml +++ b/.github/auto-label.yaml @@ -13,3 +13,7 @@ # limitations under the License. 
requestsize: enabled: true +staleness: + pullrequest: true + old: 30 + extraold: 60 diff --git a/owlbot.py b/owlbot.py index f78036e69..cf8252bcc 100644 --- a/owlbot.py +++ b/owlbot.py @@ -66,6 +66,7 @@ ".coveragerc", "docs/multiprocessing.rst", ".github/workflows", # exclude gh actions as credentials are needed for tests + ".github/auto-label.yaml", ]) # adjust .trampolinerc for environment tests From ef8d34d9939b64349f875bd60efb0981ab842f93 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Tue, 19 Apr 2022 10:14:16 -0600 Subject: [PATCH 28/36] chore: Update env-tests-logging submodule (#537) --- tests/environment | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/environment b/tests/environment index fd113e1b4..be8b03308 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit fd113e1b444b823a62f0e55eecc14a8dc34f26ee +Subproject commit be8b033084726bb7a3f62130157e1da3500f6855 From 48d313ccd10ec0a374394a4727e416f6f27bc7a3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:34:07 -0400 Subject: [PATCH 29/36] chore(python): add nox session to sort python imports (#538) Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 27 ++++++++++++++++++++++++--- samples/snippets/noxfile.py | 21 +++++++++++++++++++++ 3 files changed, 47 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index bc893c979..7c454abf7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/noxfile.py b/noxfile.py index 49f754c74..6cfcca2ee 100644 --- a/noxfile.py +++ b/noxfile.py @@ -25,7 +25,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" @@ -92,7 +93,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -103,7 +104,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 949e0fde9..38bb0a572 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +169,32 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # From 28d48b99cad464d4cbcb1ddeb0dce9f2a5403084 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:24:17 +0000 Subject: [PATCH 30/36] chore(python): use ubuntu 22.04 in docs image (#539) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/docker/docs/Dockerfile | 20 ++++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7c454abf7..64f82d6bf 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 4e1b1fb8b..238b87b9d 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From 43d3dc081e054f343d57292e492f6e89b82c570d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 27 Apr 2022 10:38:29 -0600 Subject: [PATCH 31/36] chore: use gapic-generator-python 0.65.2 (#540) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.2 PiperOrigin-RevId: 444333013 Source-Link: https://github.com/googleapis/googleapis/commit/f91b6cf82e929280f6562f6110957c654bd9e2e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/16eb36095c294e712c74a1bf23550817b42174e5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZlYjM2MDk1YzI5NGU3MTJjNzRhMWJmMjM1NTA4MTdiNDIxNzRlNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 156 ++++++------- .../logging_service_v2/async_client.py | 38 ++-- .../metrics_service_v2/async_client.py | 30 +-- .../logging_v2/test_config_service_v2.py | 208 +++++++++--------- .../logging_v2/test_logging_service_v2.py | 22 +- .../logging_v2/test_metrics_service_v2.py | 42 ++-- 6 files changed, 248 insertions(+), 248 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 808766079..0a325ce86 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -235,9 +235,9 @@ async def list_buckets( from google.cloud import logging_v2 - def sample_list_buckets(): + async def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListBucketsRequest( @@ -248,7 +248,7 @@ def sample_list_buckets(): page_result = client.list_buckets(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -351,9 +351,9 @@ async def get_bucket( from google.cloud import logging_v2 - def sample_get_bucket(): + async def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetBucketRequest( @@ -361,7 +361,7 @@ def sample_get_bucket(): ) # Make the request - response = client.get_bucket(request=request) + response = await client.get_bucket(request=request) # Handle the response print(response) @@ -425,9 +425,9 @@ async def create_bucket( from google.cloud import logging_v2 - def sample_create_bucket(): + async def sample_create_bucket(): # 
Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CreateBucketRequest( @@ -436,7 +436,7 @@ def sample_create_bucket(): ) # Make the request - response = client.create_bucket(request=request) + response = await client.create_bucket(request=request) # Handle the response print(response) @@ -509,9 +509,9 @@ async def update_bucket( from google.cloud import logging_v2 - def sample_update_bucket(): + async def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateBucketRequest( @@ -519,7 +519,7 @@ def sample_update_bucket(): ) # Make the request - response = client.update_bucket(request=request) + response = await client.update_bucket(request=request) # Handle the response print(response) @@ -586,9 +586,9 @@ async def delete_bucket( from google.cloud import logging_v2 - def sample_delete_bucket(): + async def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteBucketRequest( @@ -596,7 +596,7 @@ def sample_delete_bucket(): ) # Make the request - client.delete_bucket(request=request) + await client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -648,9 +648,9 @@ async def undelete_bucket( from google.cloud import logging_v2 - def sample_undelete_bucket(): + async def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UndeleteBucketRequest( @@ -658,7 +658,7 @@ def sample_undelete_bucket(): ) # Make the request - client.undelete_bucket(request=request) + await client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -709,9 +709,9 @@ async def list_views( from google.cloud import logging_v2 - def sample_list_views(): + async def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListViewsRequest( @@ -722,7 +722,7 @@ def sample_list_views(): page_result = client.list_views(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -817,9 +817,9 @@ async def get_view( from google.cloud import logging_v2 - def sample_get_view(): + async def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetViewRequest( @@ -827,7 +827,7 @@ def sample_get_view(): ) # Make the request - response = client.get_view(request=request) + response = await client.get_view(request=request) # Handle the response print(response) @@ -890,9 +890,9 @@ async def create_view( from google.cloud import logging_v2 - def sample_create_view(): + async def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CreateViewRequest( @@ -901,7 +901,7 @@ def sample_create_view(): ) # Make the 
request - response = client.create_view(request=request) + response = await client.create_view(request=request) # Handle the response print(response) @@ -967,9 +967,9 @@ async def update_view( from google.cloud import logging_v2 - def sample_update_view(): + async def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateViewRequest( @@ -977,7 +977,7 @@ def sample_update_view(): ) # Make the request - response = client.update_view(request=request) + response = await client.update_view(request=request) # Handle the response print(response) @@ -1042,9 +1042,9 @@ async def delete_view( from google.cloud import logging_v2 - def sample_delete_view(): + async def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteViewRequest( @@ -1052,7 +1052,7 @@ def sample_delete_view(): ) # Make the request - client.delete_view(request=request) + await client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1103,9 +1103,9 @@ async def list_sinks( from google.cloud import logging_v2 - def sample_list_sinks(): + async def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListSinksRequest( @@ -1116,7 +1116,7 @@ def sample_list_sinks(): page_result = client.list_sinks(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1227,9 +1227,9 @@ async def get_sink( from google.cloud import logging_v2 - def sample_get_sink(): + async def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetSinkRequest( @@ -1237,7 +1237,7 @@ def sample_get_sink(): ) # Make the request - response = client.get_sink(request=request) + response = await client.get_sink(request=request) # Handle the response print(response) @@ -1356,9 +1356,9 @@ async def create_sink( from google.cloud import logging_v2 - def sample_create_sink(): + async def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) sink = logging_v2.LogSink() @@ -1371,7 +1371,7 @@ def sample_create_sink(): ) # Make the request - response = client.create_sink(request=request) + response = await client.create_sink(request=request) # Handle the response print(response) @@ -1488,9 +1488,9 @@ async def update_sink( from google.cloud import logging_v2 - def sample_update_sink(): + async def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) sink = logging_v2.LogSink() @@ -1503,7 +1503,7 @@ def sample_update_sink(): ) # Make the request - response = client.update_sink(request=request) + response = await client.update_sink(request=request) # Handle the response print(response) @@ -1652,9 +1652,9 @@ async def delete_sink( from google.cloud import logging_v2 - def sample_delete_sink(): + async def sample_delete_sink(): # Create a client - client = 
logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteSinkRequest( @@ -1662,7 +1662,7 @@ def sample_delete_sink(): ) # Make the request - client.delete_sink(request=request) + await client.delete_sink(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): @@ -1759,9 +1759,9 @@ async def list_exclusions( from google.cloud import logging_v2 - def sample_list_exclusions(): + async def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListExclusionsRequest( @@ -1772,7 +1772,7 @@ def sample_list_exclusions(): page_result = client.list_exclusions(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1883,9 +1883,9 @@ async def get_exclusion( from google.cloud import logging_v2 - def sample_get_exclusion(): + async def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetExclusionRequest( @@ -1893,7 +1893,7 @@ def sample_get_exclusion(): ) # Make the request - response = client.get_exclusion(request=request) + response = await client.get_exclusion(request=request) # Handle the response print(response) @@ -2006,9 +2006,9 @@ async def create_exclusion( from google.cloud import logging_v2 - def sample_create_exclusion(): + async def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) exclusion = logging_v2.LogExclusion() @@ -2021,7 +2021,7 @@ def sample_create_exclusion(): ) # Make the request - response = client.create_exclusion(request=request) + response = await client.create_exclusion(request=request) # Handle the response print(response) @@ -2135,9 +2135,9 @@ async def update_exclusion( from google.cloud import logging_v2 - def sample_update_exclusion(): + async def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) exclusion = logging_v2.LogExclusion() @@ -2150,7 +2150,7 @@ def sample_update_exclusion(): ) # Make the request - response = client.update_exclusion(request=request) + response = await client.update_exclusion(request=request) # Handle the response print(response) @@ -2275,9 +2275,9 @@ async def delete_exclusion( from google.cloud import logging_v2 - def sample_delete_exclusion(): + async def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteExclusionRequest( @@ -2285,7 +2285,7 @@ def sample_delete_exclusion(): ) # Make the request - client.delete_exclusion(request=request) + await client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -2387,9 +2387,9 @@ async def get_cmek_settings( from google.cloud import logging_v2 - def sample_get_cmek_settings(): + async def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request 
argument(s) request = logging_v2.GetCmekSettingsRequest( @@ -2397,7 +2397,7 @@ def sample_get_cmek_settings(): ) # Make the request - response = client.get_cmek_settings(request=request) + response = await client.get_cmek_settings(request=request) # Handle the response print(response) @@ -2487,9 +2487,9 @@ async def update_cmek_settings( from google.cloud import logging_v2 - def sample_update_cmek_settings(): + async def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateCmekSettingsRequest( @@ -2497,7 +2497,7 @@ def sample_update_cmek_settings(): ) # Make the request - response = client.update_cmek_settings(request=request) + response = await client.update_cmek_settings(request=request) # Handle the response print(response) @@ -2584,9 +2584,9 @@ async def get_settings( from google.cloud import logging_v2 - def sample_get_settings(): + async def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetSettingsRequest( @@ -2594,7 +2594,7 @@ def sample_get_settings(): ) # Make the request - response = client.get_settings(request=request) + response = await client.get_settings(request=request) # Handle the response print(response) @@ -2718,9 +2718,9 @@ async def update_settings( from google.cloud import logging_v2 - def sample_update_settings(): + async def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateSettingsRequest( @@ -2728,7 +2728,7 @@ def sample_update_settings(): ) # Make the request - response = client.update_settings(request=request) + response = await client.update_settings(request=request) # Handle the response print(response) @@ -2836,9 +2836,9 @@ async def copy_log_entries( from google.cloud import logging_v2 - def sample_copy_log_entries(): + async def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CopyLogEntriesRequest( @@ -2851,7 +2851,7 @@ def sample_copy_log_entries(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index ec78309a6..32a1d1808 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -234,9 +234,9 @@ async def delete_log( from google.cloud import logging_v2 - def sample_delete_log(): + async def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteLogRequest( @@ -244,7 +244,7 @@ def sample_delete_log(): ) # Make the request - client.delete_log(request=request) + await client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -347,9 +347,9 @@ async def write_log_entries( from google.cloud import logging_v2 - def 
sample_write_log_entries(): + async def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) entries = logging_v2.LogEntry() @@ -360,7 +360,7 @@ def sample_write_log_entries(): ) # Make the request - response = client.write_log_entries(request=request) + response = await client.write_log_entries(request=request) # Handle the response print(response) @@ -538,9 +538,9 @@ async def list_log_entries( from google.cloud import logging_v2 - def sample_list_log_entries(): + async def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( @@ -551,7 +551,7 @@ def sample_list_log_entries(): page_result = client.list_log_entries(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -695,9 +695,9 @@ async def list_monitored_resource_descriptors( from google.cloud import logging_v2 - def sample_list_monitored_resource_descriptors(): + async def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListMonitoredResourceDescriptorsRequest( @@ -707,7 +707,7 @@ def sample_list_monitored_resource_descriptors(): page_result = client.list_monitored_resource_descriptors(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -788,9 +788,9 @@ async def list_logs( from google.cloud import logging_v2 - def sample_list_logs(): + async def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogsRequest( @@ -801,7 +801,7 @@ def sample_list_logs(): page_result = client.list_logs(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -910,9 +910,9 @@ def tail_log_entries( from google.cloud import logging_v2 - def sample_tail_log_entries(): + async def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( @@ -930,10 +930,10 @@ def request_generator(): yield request # Make the request - stream = client.tail_log_entries(requests=request_generator()) + stream = await client.tail_log_entries(requests=request_generator()) # Handle the response - for response in stream: + async for response in stream: print(response) Args: diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index e9b59bf30..816b70695 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -220,9 +220,9 @@ async def list_log_metrics( from google.cloud import logging_v2 - def sample_list_log_metrics(): + async def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) 
request = logging_v2.ListLogMetricsRequest( @@ -233,7 +233,7 @@ def sample_list_log_metrics(): page_result = client.list_log_metrics(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -341,9 +341,9 @@ async def get_log_metric( from google.cloud import logging_v2 - def sample_get_log_metric(): + async def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetLogMetricRequest( @@ -351,7 +351,7 @@ def sample_get_log_metric(): ) # Make the request - response = client.get_log_metric(request=request) + response = await client.get_log_metric(request=request) # Handle the response print(response) @@ -461,9 +461,9 @@ async def create_log_metric( from google.cloud import logging_v2 - def sample_create_log_metric(): + async def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) metric = logging_v2.LogMetric() @@ -476,7 +476,7 @@ def sample_create_log_metric(): ) # Make the request - response = client.create_log_metric(request=request) + response = await client.create_log_metric(request=request) # Handle the response print(response) @@ -586,9 +586,9 @@ async def update_log_metric( from google.cloud import logging_v2 - def sample_update_log_metric(): + async def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) metric = logging_v2.LogMetric() @@ -601,7 +601,7 @@ def sample_update_log_metric(): ) # Make the request - response = client.update_log_metric(request=request) + response = await client.update_log_metric(request=request) # Handle the response print(response) @@ -722,9 +722,9 @@ async def delete_log_metric( from google.cloud import logging_v2 - def sample_delete_log_metric(): + async def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteLogMetricRequest( @@ -732,7 +732,7 @@ def sample_delete_log_metric(): ) # Make the request - client.delete_log_metric(request=request) + await client.delete_log_metric(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 18982084f..caa7bd689 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -771,7 +771,7 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -787,7 +787,7 @@ def test_list_buckets_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -801,7 +801,7 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.ListBucketsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -819,7 +819,7 @@ async def test_list_buckets_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -950,7 +950,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogBucket) for i in results) @@ -1211,7 +1211,7 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: @@ -1227,7 +1227,7 @@ def test_get_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1241,7 +1241,7 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: @@ -1259,7 +1259,7 @@ async def test_get_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1381,7 +1381,7 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: @@ -1397,7 +1397,7 @@ def test_create_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1411,7 +1411,7 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: @@ -1429,7 +1429,7 @@ async def test_create_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1551,7 +1551,7 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: @@ -1567,7 +1567,7 @@ def test_update_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1581,7 +1581,7 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: @@ -1599,7 +1599,7 @@ async def test_update_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1693,7 +1693,7 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: @@ -1709,7 +1709,7 @@ def test_delete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1723,7 +1723,7 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: @@ -1739,7 +1739,7 @@ async def test_delete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1833,7 +1833,7 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: @@ -1849,7 +1849,7 @@ def test_undelete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1863,7 +1863,7 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: @@ -1879,7 +1879,7 @@ async def test_undelete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1981,7 +1981,7 @@ def test_list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1997,7 +1997,7 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2011,7 +2011,7 @@ async def test_list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -2029,7 +2029,7 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2160,7 +2160,7 @@ def test_list_views_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogView) for i in results) @@ -2409,7 +2409,7 @@ def test_get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: @@ -2425,7 +2425,7 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2439,7 +2439,7 @@ async def test_get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: @@ -2457,7 +2457,7 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2567,7 +2567,7 @@ def test_create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: @@ -2583,7 +2583,7 @@ def test_create_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2597,7 +2597,7 @@ async def test_create_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_view), "__call__") as call: @@ -2615,7 +2615,7 @@ async def test_create_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2725,7 +2725,7 @@ def test_update_view_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UpdateViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: @@ -2741,7 +2741,7 @@ def test_update_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2755,7 +2755,7 @@ async def test_update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: @@ -2773,7 +2773,7 @@ async def test_update_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2867,7 +2867,7 @@ def test_delete_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: @@ -2883,7 +2883,7 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -2897,7 +2897,7 @@ async def test_delete_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: @@ -2913,7 +2913,7 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -3015,7 +3015,7 @@ def test_list_sinks_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -3031,7 +3031,7 @@ def test_list_sinks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -3045,7 +3045,7 @@ async def test_list_sinks_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -3063,7 +3063,7 @@ async def test_list_sinks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -3194,7 +3194,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogSink) for i in results) @@ -3466,7 +3466,7 @@ def test_get_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3482,7 +3482,7 @@ def test_get_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -3496,7 +3496,7 @@ async def test_get_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3514,7 +3514,7 @@ async def test_get_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -3729,7 +3729,7 @@ def test_create_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3745,7 +3745,7 @@ def test_create_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -3759,7 +3759,7 @@ async def test_create_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3777,7 +3777,7 @@ async def test_create_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -4002,7 +4002,7 @@ def test_update_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -4018,7 +4018,7 @@ def test_update_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4032,7 +4032,7 @@ async def test_update_sink_field_headers_async(): # a field header. 
Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -4050,7 +4050,7 @@ async def test_update_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4246,7 +4246,7 @@ def test_delete_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -4262,7 +4262,7 @@ def test_delete_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4276,7 +4276,7 @@ async def test_delete_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = "sink_name/value" + request.sink_name = "sink_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -4292,7 +4292,7 @@ async def test_delete_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "sink_name=sink_name/value", + "sink_name=sink_name_value", ) in kw["metadata"] @@ -4474,7 +4474,7 @@ def test_list_exclusions_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4490,7 +4490,7 @@ def test_list_exclusions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -4504,7 +4504,7 @@ async def test_list_exclusions_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4522,7 +4522,7 @@ async def test_list_exclusions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -4653,7 +4653,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogExclusion) for i in results) @@ -4906,7 +4906,7 @@ def test_get_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4922,7 +4922,7 @@ def test_get_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -4936,7 +4936,7 @@ async def test_get_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4954,7 +4954,7 @@ async def test_get_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5150,7 +5150,7 @@ def test_create_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -5166,7 +5166,7 @@ def test_create_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -5180,7 +5180,7 @@ async def test_create_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -5198,7 +5198,7 @@ async def test_create_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -5404,7 +5404,7 @@ def test_update_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -5420,7 +5420,7 @@ def test_update_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5434,7 +5434,7 @@ async def test_update_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -5452,7 +5452,7 @@ async def test_update_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5648,7 +5648,7 @@ def test_delete_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5664,7 +5664,7 @@ def test_delete_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5678,7 +5678,7 @@ async def test_delete_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5694,7 +5694,7 @@ async def test_delete_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5890,7 +5890,7 @@ def test_get_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5908,7 +5908,7 @@ def test_get_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -5922,7 +5922,7 @@ async def test_get_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5942,7 +5942,7 @@ async def test_get_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6059,7 +6059,7 @@ def test_update_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6077,7 +6077,7 @@ def test_update_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6091,7 +6091,7 @@ async def test_update_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -6111,7 +6111,7 @@ async def test_update_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6229,7 +6229,7 @@ def test_get_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_settings), "__call__") as call: @@ -6245,7 +6245,7 @@ def test_get_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6259,7 +6259,7 @@ async def test_get_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_settings), "__call__") as call: @@ -6277,7 +6277,7 @@ async def test_get_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6477,7 +6477,7 @@ def test_update_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: @@ -6493,7 +6493,7 @@ def test_update_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -6507,7 +6507,7 @@ async def test_update_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSettingsRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_settings), "__call__") as call: @@ -6525,7 +6525,7 @@ async def test_update_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 1f74ac5b2..e7cbfcd8a 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -766,7 +766,7 @@ def test_delete_log_field_headers(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = "log_name/value" + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: @@ -782,7 +782,7 @@ def test_delete_log_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "log_name=log_name/value", + "log_name=log_name_value", ) in kw["metadata"] @@ -796,7 +796,7 @@ async def test_delete_log_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = "log_name/value" + request.log_name = "log_name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_log), "__call__") as call: @@ -812,7 +812,7 @@ async def test_delete_log_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "log_name=log_name/value", + "log_name=log_name_value", ) in kw["metadata"] @@ -1334,7 +1334,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, log_entry.LogEntry) for i in results) @@ -1617,7 +1617,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all( isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) @@ -1874,7 +1874,7 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1890,7 +1890,7 @@ def test_list_logs_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1904,7 +1904,7 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1922,7 +1922,7 @@ async def test_list_logs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2053,7 +2053,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, str) for i in results) diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 53ced9ce4..456d43946 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -772,7 +772,7 @@ def test_list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -788,7 +788,7 @@ def test_list_log_metrics_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -802,7 +802,7 @@ async def test_list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -820,7 +820,7 @@ async def test_list_log_metrics_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -951,7 +951,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_metrics.LogMetric) for i in results) @@ -1212,7 +1212,7 @@ def test_get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: @@ -1228,7 +1228,7 @@ def test_get_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1242,7 +1242,7 @@ async def test_get_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: @@ -1260,7 +1260,7 @@ async def test_get_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1470,7 +1470,7 @@ def test_create_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1488,7 +1488,7 @@ def test_create_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1502,7 +1502,7 @@ async def test_create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1522,7 +1522,7 @@ async def test_create_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1746,7 +1746,7 @@ def test_update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1764,7 +1764,7 @@ def test_update_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1778,7 +1778,7 @@ async def test_update_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1798,7 +1798,7 @@ async def test_update_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -1994,7 +1994,7 @@ def test_delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2012,7 +2012,7 @@ def test_delete_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] @@ -2026,7 +2026,7 @@ async def test_delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = "metric_name/value" + request.metric_name = "metric_name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2044,7 +2044,7 @@ async def test_delete_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "metric_name=metric_name/value", + "metric_name=metric_name_value", ) in kw["metadata"] From 422a77d93655fba3406ecf397cf417ad37dd1ce1 Mon Sep 17 00:00:00 2001 From: Drew Brown Date: Thu, 28 Apr 2022 13:55:34 -0600 Subject: [PATCH 32/36] docs: Add link to interactive walkthrough (#541) --- README.rst | 8 +++++++ docs/_static/guide-me.svg | 45 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 docs/_static/guide-me.svg diff --git a/README.rst b/README.rst index e5017619e..93b601ba9 100644 --- a/README.rst +++ b/README.rst @@ -34,6 +34,14 @@ In order to use this library, you first need to go through the following steps: .. _Enable the Cloud Logging API.: https://cloud.google.com/logging .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +For an interactive walkthrough on how to use this library in a python application, click the Guide Me button below: + +.. 
raw:: html + + + + + Installation ~~~~~~~~~~~~ diff --git a/docs/_static/guide-me.svg b/docs/_static/guide-me.svg new file mode 100644 index 000000000..c0196b96b --- /dev/null +++ b/docs/_static/guide-me.svg @@ -0,0 +1,45 @@ + + + + + + GUIDE ME + + + + + + + From f6bd611676b100f81c16e6c8cf6f008bd8f679a7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:12:18 -0400 Subject: [PATCH 33/36] chore: [autoapprove] update readme_gen.py to include autoescape True (#546) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 64f82d6bf..b631901e9 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e97..91b59676b 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From dbfee0205b1dc95ca7000159365bfb5041b95719 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 May 2022 15:00:42 -0700 Subject: [PATCH 34/36] chore(python): auto approve template changes (#549) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 .github/auto-approve.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b631901e9..757c9dca7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 000000000..311ebbb85 --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From 471eccb42988a53a09ad20c856cc888bbe26a298 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 8 May 2022 14:30:50 +0200 Subject: [PATCH 35/36] chore(deps): update all dependencies (#532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Drew Brown Co-authored-by: Anthonios Partheniou --- samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 541b22a36..acdfd4276 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.0.0 google-cloud-bigquery==3.0.1 -google-cloud-storage==2.2.1 -google-cloud-pubsub==2.11.0 +google-cloud-storage==2.3.0 +google-cloud-pubsub==2.12.0 From 737a9e24ea0c070ecd34c899b47b8539c41da77d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 May 2022 12:14:17 -0400 Subject: [PATCH 36/36] chore(main): release 3.1.0 (#479) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 28 ++++++++++++++++++++++++++++ setup.py | 2 +- 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9bfce6bf1..f859d7236 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,34 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.1.0](https://github.com/googleapis/python-logging/compare/v3.0.0...v3.1.0) (2022-05-08) + + +### Features + +* KMS configuration in settings ([#489](https://github.com/googleapis/python-logging/issues/489)) ([6699f8c](https://github.com/googleapis/python-logging/commit/6699f8c545d1a9904a945a9d789d7220da9433bf)) +* Update Logging API with latest changes ([6699f8c](https://github.com/googleapis/python-logging/commit/6699f8c545d1a9904a945a9d789d7220da9433bf)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#494](https://github.com/googleapis/python-logging/issues/494)) ([ab14563](https://github.com/googleapis/python-logging/commit/ab145630ffbb25a88cc058569b9e425e62b32ced)) +* fix system test for mtls ([#485](https://github.com/googleapis/python-logging/issues/485)) ([96bb6f7](https://github.com/googleapis/python-logging/commit/96bb6f786c91656b52624fbbf52e036b1a908d53)) +* Reenable staleness bot ([#535](https://github.com/googleapis/python-logging/issues/535)) ([1595e42](https://github.com/googleapis/python-logging/commit/1595e4203faeb3d46b28a7d98f68761998e3aa26)) +* remove unnecessary detect_resource calls from CloudLoggingHandler ([#484](https://github.com/googleapis/python-logging/issues/484)) 
([def7440](https://github.com/googleapis/python-logging/commit/def7440ac6964451f3202b5117e3060ec62045b0)) +* resolve DuplicateCredentialArgs error when using credentials_file ([265061e](https://github.com/googleapis/python-logging/commit/265061eae8396caaef3fdfeae80e0a120f9a5cda)) + + +### Dependencies + +* Pin jinja2 version to fix CI ([#522](https://github.com/googleapis/python-logging/issues/522)) ([383f2f0](https://github.com/googleapis/python-logging/commit/383f2f0062d3703dfc7e2c331562fb88327cdf38)) + + +### Documentation + +* add generated snippets ([6699f8c](https://github.com/googleapis/python-logging/commit/6699f8c545d1a9904a945a9d789d7220da9433bf)) +* Add link to interactive walkthrough ([#541](https://github.com/googleapis/python-logging/issues/541)) ([422a77d](https://github.com/googleapis/python-logging/commit/422a77d93655fba3406ecf397cf417ad37dd1ce1)) + ## [3.0.0](https://github.com/googleapis/python-logging/compare/v2.7.0...v3.0.0) (2022-01-27) diff --git a/setup.py b/setup.py index 2b39d7bbd..dc26595c0 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "3.0.0" +version = "3.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
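A minimal sketch of the `credentials_file` path covered by the "resolve DuplicateCredentialArgs error when using credentials_file" fix listed in the changelog above. `ClientOptions` and `ConfigServiceV2Client` are standard surface for this library, but the key-file path is a placeholder and the snippet is an illustration of the option, not code taken from this release:

    from google.api_core.client_options import ClientOptions
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    # Placeholder path; point this at a real service-account key file.
    options = ClientOptions(credentials_file="service-account.json")

    # Prior to the fix noted above, combining credentials_file with the default
    # gRPC transport could raise DuplicateCredentialArgs; with 3.1.0 the option
    # is accepted and used to load the transport's credentials.
    client = ConfigServiceV2Client(client_options=options)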