diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b5c26ed01..da616c91a 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:457583330eec64daa02aeb7a72a04d33e7be2428f646671ce4045dcbc0191b1e + digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1bbd78783..4f00c7cff 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.1 + rev: 3.9.2 hooks: - id: flake8 diff --git a/CHANGELOG.md b/CHANGELOG.md index 1828b3e69..3dd7ab48b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [2.5.0](https://www.github.com/googleapis/python-logging/compare/v2.4.0...v2.5.0) (2021-06-10) + + +### Features + +* support AuditLog and RequestLog protos ([#274](https://www.github.com/googleapis/python-logging/issues/274)) ([5d91be9](https://www.github.com/googleapis/python-logging/commit/5d91be9f121c364cbd53c6a9fffc4fb6ca6bd324)) + + +### Bug Fixes + +* **deps:** add packaging requirement ([#300](https://www.github.com/googleapis/python-logging/issues/300)) ([68c5cec](https://www.github.com/googleapis/python-logging/commit/68c5ceced3288253af8e3c6013a35fa3954b37bc)) +* structured log handler formatting issues ([#319](https://www.github.com/googleapis/python-logging/issues/319)) ([db9da37](https://www.github.com/googleapis/python-logging/commit/db9da3700511b5a24c3c44c9f4377705937caf46)) + ## [2.4.0](https://www.github.com/googleapis/python-logging/compare/v2.3.1...v2.4.0) (2021-05-12) diff --git a/docs/conf.py b/docs/conf.py index 8e1d46bc7..b60a9ce4c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -363,6 +363,7 @@ "google.api_core": 
("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/google/cloud/logging_v2/entries.py b/google/cloud/logging_v2/entries.py index 87e042018..fa7e5d9d1 100644 --- a/google/cloud/logging_v2/entries.py +++ b/google/cloud/logging_v2/entries.py @@ -27,6 +27,9 @@ from google.cloud._helpers import _rfc3339_nanos_to_datetime from google.cloud._helpers import _datetime_to_rfc3339 +# import officially supported proto definitions +import google.cloud.audit.audit_log_pb2 # noqa: F401 +import google.cloud.appengine_logging # noqa: F401 _GLOBAL_RESOURCE = Resource(type="global", labels={}) @@ -316,13 +319,18 @@ def payload_pb(self): @property def payload_json(self): - if not isinstance(self.payload, Any): + if isinstance(self.payload, collections.abc.Mapping): return self.payload def to_api_repr(self): """API repr (JSON format) for entry.""" info = super(ProtobufEntry, self).to_api_repr() - info["protoPayload"] = MessageToDict(self.payload) + proto_payload = None + if self.payload_json: + proto_payload = dict(self.payload_json) + elif self.payload_pb: + proto_payload = MessageToDict(self.payload_pb) + info["protoPayload"] = proto_payload return info def parse_message(self, message): diff --git a/google/cloud/logging_v2/gapic_metadata.json b/google/cloud/logging_v2/gapic_metadata.json new file mode 100644 index 000000000..da4eefd47 --- /dev/null +++ b/google/cloud/logging_v2/gapic_metadata.json @@ -0,0 +1,391 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.logging_v2", + "protoPackage": "google.logging.v2", + "schema": "1.0", + "services": { + "ConfigServiceV2": { + "clients": { + "grpc": { + "libraryClient": "ConfigServiceV2Client", + 
"rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConfigServiceV2AsyncClient", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + 
"GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + } + } + }, + "LoggingServiceV2": { + "clients": { + "grpc": { + "libraryClient": "LoggingServiceV2Client", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LoggingServiceV2AsyncClient", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + } + } + }, + "MetricsServiceV2": { 
+ "clients": { + "grpc": { + "libraryClient": "MetricsServiceV2Client", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricsServiceV2AsyncClient", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/logging_v2/handlers/structured_log.py b/google/cloud/logging_v2/handlers/structured_log.py index 0edb5c39e..f0b4c69ec 100644 --- a/google/cloud/logging_v2/handlers/structured_log.py +++ b/google/cloud/logging_v2/handlers/structured_log.py @@ -14,13 +14,13 @@ """Logging handler for printing formatted structured logs to standard output. """ - +import json import logging.handlers from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter GCP_FORMAT = ( - '{"message": "%(_msg_str)s", ' + '{"message": %(_formatted_msg)s, ' '"severity": "%(levelname)s", ' '"logging.googleapis.com/labels": %(_labels_str)s, ' '"logging.googleapis.com/trace": "%(_trace_str)s", ' @@ -50,7 +50,7 @@ def __init__(self, *, labels=None, stream=None, project_id=None): self.addFilter(log_filter) # make logs appear in GCP structured logging format - self.formatter = logging.Formatter(GCP_FORMAT) + self._gcp_formatter = logging.Formatter(GCP_FORMAT) def format(self, record): """Format the message into structured log JSON. 
@@ -59,6 +59,12 @@ def format(self, record): Returns: str: A JSON string formatted for GKE fluentd. """ - - payload = self.formatter.format(record) - return payload + # let other formatters alter the message + super_payload = None + if record.msg: + super_payload = super(StructuredLogHandler, self).format(record) + # properly break any formatting in string to make it json safe + record._formatted_msg = json.dumps(super_payload or "") + # convert to GCP structured logging format + gcp_payload = self._gcp_formatter.format(record) + return gcp_payload diff --git a/google/cloud/logging_v2/proto/log_entry.proto b/google/cloud/logging_v2/proto/log_entry.proto deleted file mode 100644 index 3ad2cfbb5..000000000 --- a/google/cloud/logging_v2/proto/log_entry.proto +++ /dev/null @@ -1,210 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License.
- -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/field_behavior.proto"; -import "google/api/monitored_resource.proto"; -import "google/api/resource.proto"; -import "google/logging/type/http_request.proto"; -import "google/logging/type/log_severity.proto"; -import "google/protobuf/any.proto"; -import "google/protobuf/struct.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LogEntryProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; -option ruby_package = "Google::Cloud::Logging::V2"; - -// An individual entry in a log. -// -// -message LogEntry { - option (google.api.resource) = { - type: "logging.googleapis.com/Log" - pattern: "projects/{project}/logs/{log}" - pattern: "organizations/{organization}/logs/{log}" - pattern: "folders/{folder}/logs/{log}" - pattern: "billingAccounts/{billing_account}/logs/{log}" - name_field: "log_name" - }; - - // Required. The resource name of the log to which this log entry belongs: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // A project number may be used in place of PROJECT_ID. The project number is - // translated to its corresponding PROJECT_ID internally and the `log_name` - // field will contain PROJECT_ID in queries and exports. - // - // `[LOG_ID]` must be URL-encoded within `log_name`. Example: - // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. 
- // `[LOG_ID]` must be less than 512 characters long and can only include the - // following characters: upper and lower case alphanumeric characters, - // forward-slash, underscore, hyphen, and period. - // - // For backward compatibility, if `log_name` begins with a forward-slash, such - // as `/projects/...`, then the log entry is ingested as usual but the - // forward-slash is removed. Listing the log entry will not show the leading - // slash and filtering for a log name with a leading slash will never return - // any results. - string log_name = 12 [(google.api.field_behavior) = REQUIRED]; - - // Required. The monitored resource that produced this log entry. - // - // Example: a log entry that reports a database error would be associated with - // the monitored resource designating the particular database that reported - // the error. - google.api.MonitoredResource resource = 8 [(google.api.field_behavior) = REQUIRED]; - - // The log entry payload, which can be one of multiple types. - oneof payload { - // The log entry payload, represented as a protocol buffer. Some Google - // Cloud Platform services use this field for their log entry payloads. - // - // The following protocol buffer types are supported; user-defined types - // are not supported: - // - // "type.googleapis.com/google.cloud.audit.AuditLog" - // "type.googleapis.com/google.appengine.logging.v1.RequestLog" - google.protobuf.Any proto_payload = 2; - - // The log entry payload, represented as a Unicode string (UTF-8). - string text_payload = 3; - - // The log entry payload, represented as a structure that is - // expressed as a JSON object. - google.protobuf.Struct json_payload = 6; - } - - // Optional. The time the event described by the log entry occurred. This time is used - // to compute the log entry's age and to enforce the logs retention period. - // If this field is omitted in a new log entry, then Logging assigns it the - // current time. 
Timestamps have nanosecond accuracy, but trailing zeros in - // the fractional seconds might be omitted when the timestamp is displayed. - // - // Incoming log entries must have timestamps that don't exceed the - // [logs retention - // period](https://cloud.google.com/logging/quotas#logs_retention_periods) in - // the past, and that don't exceed 24 hours in the future. Log entries outside - // those time boundaries aren't ingested by Logging. - google.protobuf.Timestamp timestamp = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The time the log entry was received by Logging. - google.protobuf.Timestamp receive_timestamp = 24 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The severity of the log entry. The default value is `LogSeverity.DEFAULT`. - google.logging.type.LogSeverity severity = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique identifier for the log entry. If you provide a value, then - // Logging considers other log entries in the same project, with the same - // `timestamp`, and with the same `insert_id` to be duplicates which are - // removed in a single query result. However, there are no guarantees of - // de-duplication in the export of logs. - // - // If the `insert_id` is omitted when writing a log entry, the Logging API - // assigns its own unique identifier in this field. - // - // In queries, the `insert_id` is also used to order log entries that have - // the same `log_name` and `timestamp` values. - string insert_id = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Information about the HTTP request associated with this log entry, if - // applicable. - google.logging.type.HttpRequest http_request = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A set of user-defined (key, value) data that provides additional - // information about the log entry. - map labels = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
Information about an operation associated with the log entry, if - // applicable. - LogEntryOperation operation = 15 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Resource name of the trace associated with the log entry, if any. If it - // contains a relative resource name, the name is assumed to be relative to - // `//tracing.googleapis.com`. Example: - // `projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824` - string trace = 22 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The span ID within the trace associated with the log entry. - // - // For Trace spans, this is the same format that the Trace API v2 uses: a - // 16-character hexadecimal encoding of an 8-byte array, such as - // `000000000000004a`. - string span_id = 27 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The sampling decision of the trace associated with the log entry. - // - // True means that the trace resource name in the `trace` field was sampled - // for storage in a trace backend. False means that the trace was not sampled - // for storage when this log entry was written, or the sampling decision was - // unknown at the time. A non-sampled `trace` value is still useful as a - // request correlation identifier. The default is False. - bool trace_sampled = 30 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Source code location information associated with the log entry, if any. - LogEntrySourceLocation source_location = 23 [(google.api.field_behavior) = OPTIONAL]; -} - -// Additional information about a potentially long-running operation with which -// a log entry is associated. -message LogEntryOperation { - // Optional. An arbitrary operation identifier. Log entries with the same - // identifier are assumed to be part of the same operation. - string id = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. An arbitrary producer identifier. The combination of `id` and - // `producer` must be globally unique. 
Examples for `producer`: - // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. - string producer = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Set this to True if this is the first log entry in the operation. - bool first = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Set this to True if this is the last log entry in the operation. - bool last = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Additional information about the source code location that produced the log -// entry. -message LogEntrySourceLocation { - // Optional. Source file name. Depending on the runtime environment, this - // might be a simple name or a fully-qualified name. - string file = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Line within the source file. 1-based; 0 indicates no line number - // available. - int64 line = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Human-readable name of the function or method being invoked, with - // optional context such as the class or package name. This information may be - // used in contexts such as the logs viewer, where a file and line number are - // less meaningful. The format can vary by language. For example: - // `qual.if.ied.Class.method` (Java), `dir/package.func` (Go), `function` - // (Python). - string function = 3 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/google/cloud/logging_v2/proto/logging.proto b/google/cloud/logging_v2/proto/logging.proto deleted file mode 100644 index f8b01a71e..000000000 --- a/google/cloud/logging_v2/proto/logging.proto +++ /dev/null @@ -1,478 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/monitored_resource.proto"; -import "google/api/resource.proto"; -import "google/logging/v2/log_entry.proto"; -import "google/logging/v2/logging_config.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LoggingProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; -option ruby_package = "Google::Cloud::Logging::V2"; - -// Service for ingesting and querying logs. -service LoggingServiceV2 { - option (google.api.default_host) = "logging.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only," - "https://www.googleapis.com/auth/logging.admin," - "https://www.googleapis.com/auth/logging.read," - "https://www.googleapis.com/auth/logging.write"; - - // Deletes all the log entries in a log. The log reappears if it receives new - // entries. 
Log entries written shortly before the delete operation might not - // be deleted. Entries received after the delete operation with a timestamp - // before the operation will be deleted. - rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{log_name=projects/*/logs/*}" - additional_bindings { - delete: "/v2/{log_name=*/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=organizations/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=folders/*/logs/*}" - } - additional_bindings { - delete: "/v2/{log_name=billingAccounts/*/logs/*}" - } - }; - option (google.api.method_signature) = "log_name"; - } - - // Writes log entries to Logging. This API method is the - // only way to send log entries to Logging. This method - // is used, directly or indirectly, by the Logging agent - // (fluentd) and all logging libraries configured to use Logging. - // A single request may contain log entries for a maximum of 1000 - // different resources (projects, organizations, billing accounts or - // folders) - rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:write" - body: "*" - }; - option (google.api.method_signature) = "log_name,resource,labels,entries"; - } - - // Lists log entries. Use this method to retrieve log entries that originated - // from a project/folder/organization/billing account. For ways to export log - // entries, see [Exporting - // Logs](https://cloud.google.com/logging/docs/export). - rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:list" - body: "*" - }; - option (google.api.method_signature) = "resource_names,filter,order_by"; - } - - // Lists the descriptors for monitored resource types used by Logging. 
- rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { - option (google.api.http) = { - get: "/v2/monitoredResourceDescriptors" - }; - } - - // Lists the logs in projects, organizations, folders, or billing accounts. - // Only logs that have entries are listed. - rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/logs" - additional_bindings { - get: "/v2/{parent=projects/*}/logs" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/logs" - } - additional_bindings { - get: "/v2/{parent=folders/*}/logs" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/logs" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Streaming read of log entries as they are ingested. Until the stream is - // terminated, it will continue reading logs. - rpc TailLogEntries(stream TailLogEntriesRequest) returns (stream TailLogEntriesResponse) { - option (google.api.http) = { - post: "/v2/entries:tail" - body: "*" - }; - } -} - -// The parameters to DeleteLog. -message DeleteLogRequest { - // Required. The resource name of the log to delete: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // `[LOG_ID]` must be URL-encoded. For example, - // `"projects/my-project-id/logs/syslog"`, - // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. - // For more information about log names, see - // [LogEntry][google.logging.v2.LogEntry]. - string log_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/Log" - } - ]; -} - -// The parameters to WriteLogEntries. -message WriteLogEntriesRequest { - // Optional. 
A default log resource name that is assigned to all log entries - // in `entries` that do not specify a value for `log_name`: - // - // "projects/[PROJECT_ID]/logs/[LOG_ID]" - // "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - // "folders/[FOLDER_ID]/logs/[LOG_ID]" - // - // `[LOG_ID]` must be URL-encoded. For example: - // - // "projects/my-project-id/logs/syslog" - // "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" - // - // The permission `logging.logEntries.create` is needed on each project, - // organization, billing account, or folder that is receiving new log - // entries, whether the resource is specified in `logName` or in an - // individual log entry. - string log_name = 1 [ - (google.api.field_behavior) = OPTIONAL, - (google.api.resource_reference) = { - type: "logging.googleapis.com/Log" - } - ]; - - // Optional. A default monitored resource object that is assigned to all log - // entries in `entries` that do not specify a value for `resource`. Example: - // - // { "type": "gce_instance", - // "labels": { - // "zone": "us-central1-a", "instance_id": "00000000000000000000" }} - // - // See [LogEntry][google.logging.v2.LogEntry]. - google.api.MonitoredResource resource = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Default labels that are added to the `labels` field of all log - // entries in `entries`. If a log entry already has a label with the same key - // as a label in this parameter, then the log entry's label is not changed. - // See [LogEntry][google.logging.v2.LogEntry]. - map labels = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The log entries to send to Logging. The order of log - // entries in this list does not matter. Values supplied in this method's - // `log_name`, `resource`, and `labels` fields are copied into those log - // entries in this list that do not include values for their corresponding - // fields. 
For more information, see the - // [LogEntry][google.logging.v2.LogEntry] type. - // - // If the `timestamp` or `insert_id` fields are missing in log entries, then - // this method supplies the current time or a unique identifier, respectively. - // The supplied values are chosen so that, among the log entries that did not - // supply their own values, the entries earlier in the list will sort before - // the entries later in the list. See the `entries.list` method. - // - // Log entries with timestamps that are more than the - // [logs retention period](https://cloud.google.com/logging/quota-policy) in - // the past or more than 24 hours in the future will not be available when - // calling `entries.list`. However, those log entries can still be [exported - // with - // LogSinks](https://cloud.google.com/logging/docs/api/tasks/exporting-logs). - // - // To improve throughput and to avoid exceeding the - // [quota limit](https://cloud.google.com/logging/quota-policy) for calls to - // `entries.write`, you should try to include several log entries in this - // list, rather than calling this method for each individual log entry. - repeated LogEntry entries = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Whether valid entries should be written even if some other - // entries fail due to INVALID_ARGUMENT or PERMISSION_DENIED errors. If any - // entry is not written, then the response status is the error associated - // with one of the failed entries and the response includes error details - // keyed by the entries' zero-based index in the `entries.write` method. - bool partial_success = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If true, the request should expect normal response, but the - // entries won't be persisted nor exported. Useful for checking whether the - // logging API endpoints are working properly before sending valuable data. 
- bool dry_run = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from WriteLogEntries. -message WriteLogEntriesResponse {} - -// Error details for WriteLogEntries with partial success. -message WriteLogEntriesPartialErrors { - // When `WriteLogEntriesRequest.partial_success` is true, records the error - // status for entries that were not written due to a permanent error, keyed - // by the entry's zero-based index in `WriteLogEntriesRequest.entries`. - // - // Failed requests for which no entries are written will not include - // per-entry errors. - map log_entry_errors = 1; -} - -// The parameters to `ListLogEntries`. -message ListLogEntriesRequest { - // Required. Names of one or more parent resources from which to - // retrieve log entries: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // May alternatively be one or more views - // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // - // Projects listed in the `project_ids` field are added to this list. - repeated string resource_names = 8 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Log" - } - ]; - - // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Queries](https://cloud.google.com/logging/docs/view/advanced-queries). - // Only log entries that match the filter are returned. An empty filter - // matches all log entries in the resources listed in `resource_names`. 
- // Referencing a parent resource that is not listed in `resource_names` will - // cause the filter to return no results. The maximum length of the filter is - // 20000 characters. - string filter = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. How the results should be sorted. Presently, the only permitted - // values are `"timestamp asc"` (default) and `"timestamp desc"`. The first - // option returns entries in order of increasing values of - // `LogEntry.timestamp` (oldest first), and the second option returns entries - // in order of decreasing timestamps (newest first). Entries with equal - // timestamps are returned in order of their `insert_id` values. - string order_by = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Default is 50. If the value is negative or exceeds 1000, - // the request is rejected. The presence of `next_page_token` in the - // response indicates that more results might be available. - int32 page_size = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `page_token` must be the value of - // `next_page_token` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from `ListLogEntries`. -message ListLogEntriesResponse { - // A list of log entries. If `entries` is empty, `nextPageToken` may still be - // returned, indicating that more entries may exist. See `nextPageToken` for - // more information. - repeated LogEntry entries = 1; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. 
- // - // If a value for `next_page_token` appears and the `entries` field is empty, - // it means that the search found no log entries so far but it did not have - // time to search all the possible log entries. Retry the method with this - // value for `page_token` to continue the search. Alternatively, consider - // speeding up the search by changing your filter to specify a single log name - // or resource type, or to narrow the time range of the search. - string next_page_token = 2; -} - -// The parameters to ListMonitoredResourceDescriptors -message ListMonitoredResourceDescriptorsRequest { - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from ListMonitoredResourceDescriptors. -message ListMonitoredResourceDescriptorsResponse { - // A list of resource descriptors. - repeated google.api.MonitoredResourceDescriptor resource_descriptors = 1; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to ListLogs. -message ListLogsRequest { - // Required. 
The resource name that owns the logs: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/Log" - } - ]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The resource name that owns the logs: - // projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - // - // To support legacy queries, it could also be: - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - repeated string resource_names = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from ListLogs. -message ListLogsResponse { - // A list of log names. For example, - // `"projects/my-project/logs/syslog"` or - // `"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"`. 
- repeated string log_names = 3; - - // If there might be more results than those appearing in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `TailLogEntries`. -message TailLogEntriesRequest { - // Required. Name of a parent resource from which to retrieve log entries: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // May alternatively be one or more views: - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - repeated string resource_names = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A filter that chooses which log entries to return. See [Advanced - // Logs Filters](https://cloud.google.com/logging/docs/view/advanced_filters). - // Only log entries that match the filter are returned. An empty filter - // matches all log entries in the resources listed in `resource_names`. - // Referencing a parent resource that is not in `resource_names` will cause - // the filter to return no results. The maximum length of the filter is 20000 - // characters. - string filter = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The amount of time to buffer log entries at the server before - // being returned to prevent out of order results due to late arriving log - // entries. Valid values are between 0-60000 milliseconds. Defaults to 2000 - // milliseconds. 
- google.protobuf.Duration buffer_window = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from `TailLogEntries`. -message TailLogEntriesResponse { - // Information about entries that were omitted from the session. - message SuppressionInfo { - // An indicator of why entries were omitted. - enum Reason { - // Unexpected default. - REASON_UNSPECIFIED = 0; - - // Indicates suppression occurred due to relevant entries being - // received in excess of rate limits. For quotas and limits, see - // [Logging API quotas and - // limits](https://cloud.google.com/logging/quotas#api-limits). - RATE_LIMIT = 1; - - // Indicates suppression occurred due to the client not consuming - // responses quickly enough. - NOT_CONSUMED = 2; - } - - // The reason that entries were omitted from the session. - Reason reason = 1; - - // A lower bound on the count of entries omitted due to `reason`. - int32 suppressed_count = 2; - } - - // A list of log entries. Each response in the stream will order entries with - // increasing values of `LogEntry.timestamp`. Ordering is not guaranteed - // between separate responses. - repeated LogEntry entries = 1; - - // If entries that otherwise would have been included in the session were not - // sent back to the client, counts of relevant entries omitted from the - // session with the reason that they were not included. There will be at most - // one of each reason per response. The counts represent the number of - // suppressed entries since the last streamed response. 
- repeated SuppressionInfo suppression_info = 2; -} diff --git a/google/cloud/logging_v2/proto/logging_config.proto b/google/cloud/logging_v2/proto/logging_config.proto deleted file mode 100644 index 9b10932d6..000000000 --- a/google/cloud/logging_v2/proto/logging_config.proto +++ /dev/null @@ -1,1567 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.logging.v2; - -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; -import "google/api/annotations.proto"; - -option cc_enable_arenas = true; -option csharp_namespace = "Google.Cloud.Logging.V2"; -option go_package = "google.golang.org/genproto/googleapis/logging/v2;logging"; -option java_multiple_files = true; -option java_outer_classname = "LoggingConfigProto"; -option java_package = "com.google.logging.v2"; -option php_namespace = "Google\\Cloud\\Logging\\V2"; -option ruby_package = "Google::Cloud::Logging::V2"; -option (google.api.resource_definition) = { - type: "logging.googleapis.com/OrganizationLocation" - pattern: "organizations/{organization}/locations/{location}" -}; -option (google.api.resource_definition) = { - type: "logging.googleapis.com/FolderLocation" - pattern: 
"folders/{folder}/locations/{location}" -}; -option (google.api.resource_definition) = { - type: "logging.googleapis.com/BillingAccountLocation" - pattern: "billingAccounts/{billing_account}/locations/{location}" -}; - -// Service for configuring sinks used to route log entries. -service ConfigServiceV2 { - option (google.api.default_host) = "logging.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform," - "https://www.googleapis.com/auth/cloud-platform.read-only," - "https://www.googleapis.com/auth/logging.admin," - "https://www.googleapis.com/auth/logging.read"; - - // Lists buckets. - rpc ListBuckets(ListBucketsRequest) returns (ListBucketsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*/locations/*}/buckets" - additional_bindings { - get: "/v2/{parent=projects/*/locations/*}/buckets" - } - additional_bindings { - get: "/v2/{parent=organizations/*/locations/*}/buckets" - } - additional_bindings { - get: "/v2/{parent=folders/*/locations/*}/buckets" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*/locations/*}/buckets" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets a bucket. - rpc GetBucket(GetBucketRequest) returns (LogBucket) { - option (google.api.http) = { - get: "/v2/{name=*/*/locations/*/buckets/*}" - additional_bindings { - get: "/v2/{name=projects/*/locations/*/buckets/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/locations/*/buckets/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/locations/*/buckets/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/buckets/*}" - } - }; - } - - // Creates a bucket that can be used to store log entries. Once a bucket has - // been created, the region cannot be changed. 
- rpc CreateBucket(CreateBucketRequest) returns (LogBucket) { - option (google.api.http) = { - post: "/v2/{parent=*/*/locations/*}/buckets" - body: "bucket" - additional_bindings { - post: "/v2/{parent=projects/*/locations/*}/buckets" - body: "bucket" - } - additional_bindings { - post: "/v2/{parent=organizations/*/locations/*}/buckets" - body: "bucket" - } - additional_bindings { - post: "/v2/{parent=folders/*/locations/*}/buckets" - body: "bucket" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*/locations/*}/buckets" - body: "bucket" - } - }; - } - - // Updates a bucket. This method replaces the following fields in the - // existing bucket with values from the new bucket: `retention_period` - // - // If the retention period is decreased and the bucket is locked, - // FAILED_PRECONDITION will be returned. - // - // If the bucket has a LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION - // will be returned. - // - // A buckets region may not be modified after it is created. - rpc UpdateBucket(UpdateBucketRequest) returns (LogBucket) { - option (google.api.http) = { - patch: "/v2/{name=*/*/locations/*/buckets/*}" - body: "bucket" - additional_bindings { - patch: "/v2/{name=projects/*/locations/*/buckets/*}" - body: "bucket" - } - additional_bindings { - patch: "/v2/{name=organizations/*/locations/*/buckets/*}" - body: "bucket" - } - additional_bindings { - patch: "/v2/{name=folders/*/locations/*/buckets/*}" - body: "bucket" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" - body: "bucket" - } - }; - } - - // Deletes a bucket. - // Moves the bucket to the DELETE_REQUESTED state. After 7 days, the - // bucket will be purged and all logs in the bucket will be permanently - // deleted. 
- rpc DeleteBucket(DeleteBucketRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/locations/*/buckets/*}" - additional_bindings { - delete: "/v2/{name=projects/*/locations/*/buckets/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/locations/*/buckets/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/locations/*/buckets/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*}" - } - }; - } - - // Undeletes a bucket. A bucket that has been deleted may be undeleted within - // the grace period of 7 days. - rpc UndeleteBucket(UndeleteBucketRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/v2/{name=*/*/locations/*/buckets/*}:undelete" - body: "*" - additional_bindings { - post: "/v2/{name=projects/*/locations/*/buckets/*}:undelete" - body: "*" - } - additional_bindings { - post: "/v2/{name=organizations/*/locations/*/buckets/*}:undelete" - body: "*" - } - additional_bindings { - post: "/v2/{name=folders/*/locations/*/buckets/*}:undelete" - body: "*" - } - additional_bindings { - post: "/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete" - body: "*" - } - }; - } - - // Lists views on a bucket. - rpc ListViews(ListViewsRequest) returns (ListViewsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*/locations/*/buckets/*}/views" - additional_bindings { - get: "/v2/{parent=projects/*/locations/*/buckets/*}/views" - } - additional_bindings { - get: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" - } - additional_bindings { - get: "/v2/{parent=folders/*/locations/*/buckets/*}/views" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets a view. 
- rpc GetView(GetViewRequest) returns (LogView) { - option (google.api.http) = { - get: "/v2/{name=*/*/locations/*/buckets/*/views/*}" - additional_bindings { - get: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/buckets/*/views/*}" - } - }; - } - - // Creates a view over logs in a bucket. A bucket may contain a maximum of - // 50 views. - rpc CreateView(CreateViewRequest) returns (LogView) { - option (google.api.http) = { - post: "/v2/{parent=*/*/locations/*/buckets/*}/views" - body: "view" - additional_bindings { - post: "/v2/{parent=projects/*/locations/*/buckets/*}/views" - body: "view" - } - additional_bindings { - post: "/v2/{parent=organizations/*/locations/*/buckets/*}/views" - body: "view" - } - additional_bindings { - post: "/v2/{parent=folders/*/locations/*/buckets/*}/views" - body: "view" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views" - body: "view" - } - }; - } - - // Updates a view. This method replaces the following fields in the existing - // view with values from the new view: `filter`. - rpc UpdateView(UpdateViewRequest) returns (LogView) { - option (google.api.http) = { - patch: "/v2/{name=*/*/locations/*/buckets/*/views/*}" - body: "view" - additional_bindings { - patch: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" - body: "view" - } - additional_bindings { - patch: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" - body: "view" - } - additional_bindings { - patch: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" - body: "view" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" - body: "view" - } - }; - } - - // Deletes a view from a bucket. 
- rpc DeleteView(DeleteViewRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/locations/*/buckets/*/views/*}" - additional_bindings { - delete: "/v2/{name=projects/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/locations/*/buckets/*/views/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}" - } - }; - } - - // Lists sinks. - rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/sinks" - additional_bindings { - get: "/v2/{parent=projects/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=folders/*}/sinks" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/sinks" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets a sink. - rpc GetSink(GetSinkRequest) returns (LogSink) { - option (google.api.http) = { - get: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - get: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - get: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } - }; - option (google.api.method_signature) = "sink_name"; - } - - // Creates a sink that exports specified log entries to a destination. The - // export of newly-ingested log entries begins immediately, unless the sink's - // `writer_identity` is not permitted to write to the destination. A sink can - // export log entries only from the resource owning the sink. 
- rpc CreateSink(CreateSinkRequest) returns (LogSink) { - option (google.api.http) = { - post: "/v2/{parent=*/*}/sinks" - body: "sink" - additional_bindings { - post: "/v2/{parent=projects/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=organizations/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=folders/*}/sinks" - body: "sink" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*}/sinks" - body: "sink" - } - }; - option (google.api.method_signature) = "parent,sink"; - } - - // Updates a sink. This method replaces the following fields in the existing - // sink with values from the new sink: `destination`, and `filter`. - // - // The updated sink might also have a new `writer_identity`; see the - // `unique_writer_identity` field. - rpc UpdateSink(UpdateSinkRequest) returns (LogSink) { - option (google.api.http) = { - put: "/v2/{sink_name=*/*/sinks/*}" - body: "sink" - additional_bindings { - put: "/v2/{sink_name=projects/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=organizations/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=folders/*/sinks/*}" - body: "sink" - } - additional_bindings { - put: "/v2/{sink_name=billingAccounts/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=projects/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=organizations/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=folders/*/sinks/*}" - body: "sink" - } - additional_bindings { - patch: "/v2/{sink_name=billingAccounts/*/sinks/*}" - body: "sink" - } - }; - option (google.api.method_signature) = "sink_name,sink,update_mask"; - option (google.api.method_signature) = "sink_name,sink"; - } - - // Deletes a sink. If the sink has a unique `writer_identity`, then that - // service account is also deleted. 
- rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { - delete: "/v2/{sink_name=projects/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=organizations/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=folders/*/sinks/*}" - } - additional_bindings { - delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" - } - }; - option (google.api.method_signature) = "sink_name"; - } - - // Lists all the exclusions in a parent resource. - rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { - option (google.api.http) = { - get: "/v2/{parent=*/*}/exclusions" - additional_bindings { - get: "/v2/{parent=projects/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=organizations/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=folders/*}/exclusions" - } - additional_bindings { - get: "/v2/{parent=billingAccounts/*}/exclusions" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Gets the description of an exclusion. - rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - get: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - get: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - get: "/v2/{name=billingAccounts/*/exclusions/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Creates a new exclusion in a specified parent resource. - // Only log entries belonging to that resource can be excluded. - // You can have up to 10 exclusions in a resource. 
- rpc CreateExclusion(CreateExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - post: "/v2/{parent=*/*}/exclusions" - body: "exclusion" - additional_bindings { - post: "/v2/{parent=projects/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=organizations/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=folders/*}/exclusions" - body: "exclusion" - } - additional_bindings { - post: "/v2/{parent=billingAccounts/*}/exclusions" - body: "exclusion" - } - }; - option (google.api.method_signature) = "parent,exclusion"; - } - - // Changes one or more properties of an existing exclusion. - rpc UpdateExclusion(UpdateExclusionRequest) returns (LogExclusion) { - option (google.api.http) = { - patch: "/v2/{name=*/*/exclusions/*}" - body: "exclusion" - additional_bindings { - patch: "/v2/{name=projects/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=organizations/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=folders/*/exclusions/*}" - body: "exclusion" - } - additional_bindings { - patch: "/v2/{name=billingAccounts/*/exclusions/*}" - body: "exclusion" - } - }; - option (google.api.method_signature) = "name,exclusion,update_mask"; - } - - // Deletes an exclusion. - rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v2/{name=*/*/exclusions/*}" - additional_bindings { - delete: "/v2/{name=projects/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=organizations/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=folders/*/exclusions/*}" - } - additional_bindings { - delete: "/v2/{name=billingAccounts/*/exclusions/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Gets the Logs Router CMEK settings for the given resource. 
- // - // Note: CMEK for the Logs Router can currently only be configured for GCP - // organizations. Once configured, it applies to all projects and folders in - // the GCP organization. - // - // See [Enabling CMEK for Logs - // Router](https://cloud.google.com/logging/docs/routing/managed-encryption) - // for more information. - rpc GetCmekSettings(GetCmekSettingsRequest) returns (CmekSettings) { - option (google.api.http) = { - get: "/v2/{name=*/*}/cmekSettings" - additional_bindings { - get: "/v2/{name=organizations/*}/cmekSettings" - } - }; - } - - // Updates the Logs Router CMEK settings for the given resource. - // - // Note: CMEK for the Logs Router can currently only be configured for GCP - // organizations. Once configured, it applies to all projects and folders in - // the GCP organization. - // - // [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - // will fail if 1) `kms_key_name` is invalid, or 2) the associated service - // account does not have the required - // `roles/cloudkms.cryptoKeyEncrypterDecrypter` role assigned for the key, or - // 3) access to the key is disabled. - // - // See [Enabling CMEK for Logs - // Router](https://cloud.google.com/logging/docs/routing/managed-encryption) - // for more information. - rpc UpdateCmekSettings(UpdateCmekSettingsRequest) returns (CmekSettings) { - option (google.api.http) = { - patch: "/v2/{name=*/*}/cmekSettings" - body: "cmek_settings" - additional_bindings { - patch: "/v2/{name=organizations/*}/cmekSettings" - body: "cmek_settings" - } - }; - } -} - -// Describes a repository of logs. 
-message LogBucket { - option (google.api.resource) = { - type: "logging.googleapis.com/LogBucket" - pattern: "projects/{project}/locations/{location}/buckets/{bucket}" - pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}" - pattern: "folders/{folder}/locations/{location}/buckets/{bucket}" - pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}" - }; - - // The resource name of the bucket. - // For example: - // "projects/my-project-id/locations/my-location/buckets/my-bucket-id The - // supported locations are: - // "global" - // - // For the location of `global` it is unspecified where logs are actually - // stored. - // Once a bucket has been created, the location can not be changed. - string name = 1; - - // Describes this bucket. - string description = 3; - - // Output only. The creation timestamp of the bucket. This is not set for any of the - // default buckets. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the bucket. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Logs will be retained by default for this amount of time, after which they - // will automatically be deleted. The minimum retention period is 1 day. - // If this value is set to zero at bucket creation time, the default time of - // 30 days will be used. - int32 retention_days = 11; - - // Whether the bucket has been locked. - // The retention period on a locked bucket may not be changed. - // Locked buckets may only be deleted if they are empty. - bool locked = 9; - - // Output only. The bucket lifecycle state. - LifecycleState lifecycle_state = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// LogBucket lifecycle states. -enum LifecycleState { - // Unspecified state. This is only used/useful for distinguishing - // unset values. 
- LIFECYCLE_STATE_UNSPECIFIED = 0; - - // The normal and active state. - ACTIVE = 1; - - // The bucket has been marked for deletion by the user. - DELETE_REQUESTED = 2; -} - -// Describes a view over logs in a bucket. -message LogView { - option (google.api.resource) = { - type: "logging.googleapis.com/LogView" - pattern: "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - pattern: "organizations/{organization}/locations/{location}/buckets/{bucket}/views/{view}" - pattern: "folders/{folder}/locations/{location}/buckets/{bucket}/views/{view}" - pattern: "billingAccounts/{billing_account}/locations/{location}/buckets/{bucket}/views/{view}" - }; - - // The resource name of the view. - // For example - // "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view - string name = 1; - - // Describes this view. - string description = 3; - - // Output only. The creation timestamp of the view. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the view. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Filter that restricts which log entries in a bucket are visible in this - // view. Filters are restricted to be a logical AND of ==/!= of any of the - // following: - // originating project/folder/organization/billing account. - // resource type - // log id - // Example: SOURCE("projects/myproject") AND resource.type = "gce_instance" - // AND LOG_ID("stdout") - string filter = 7; -} - -// Describes a sink used to export log entries to one of the following -// destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a -// Cloud Pub/Sub topic. A logs filter controls which log entries are exported. -// The sink must be created within a project, organization, billing account, or -// folder. 
-message LogSink { - option (google.api.resource) = { - type: "logging.googleapis.com/LogSink" - pattern: "projects/{project}/sinks/{sink}" - pattern: "organizations/{organization}/sinks/{sink}" - pattern: "folders/{folder}/sinks/{sink}" - pattern: "billingAccounts/{billing_account}/sinks/{sink}" - }; - - // Deprecated. This is unused. - enum VersionFormat { - // An unspecified format version that will default to V2. - VERSION_FORMAT_UNSPECIFIED = 0; - - // `LogEntry` version 2 format. - V2 = 1; - - // `LogEntry` version 1 format. - V1 = 2; - } - - // Required. The client-assigned sink identifier, unique within the project. Example: - // `"my-syslog-errors-to-pubsub"`. Sink identifiers are limited to 100 - // characters and can include only the following characters: upper and - // lower-case alphanumeric characters, underscores, hyphens, and periods. - // First character has to be alphanumeric. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The export destination: - // - // "storage.googleapis.com/[GCS_BUCKET]" - // "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" - // "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" - // - // The sink's `writer_identity`, set when the sink is created, must - // have permission to write to the destination or else the log - // entries are not exported. For more information, see - // [Exporting Logs with - // Sinks](https://cloud.google.com/logging/docs/api/tasks/exporting-logs). - string destination = 3 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "*" - } - ]; - - // Optional. An [advanced logs - // filter](https://cloud.google.com/logging/docs/view/advanced-queries). The - // only exported log entries are those that are in the resource owning the - // sink and that match the filter. 
For example: - // - // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A description of this sink. - // The maximum length of the description is 8000 characters. - string description = 18 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If set to True, then this sink is disabled and it does not - // export any log entries. - bool disabled = 19 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Log entries that match any of the exclusion filters will not be exported. - // If a log entry is matched by both `filter` and one of `exclusion_filters` - // it will not be exported. - repeated LogExclusion exclusions = 16 [(google.api.field_behavior) = OPTIONAL]; - - // Deprecated. This field is unused. - VersionFormat output_version_format = 6 [deprecated = true]; - - // Output only. An IAM identity—a service account or group—under which Logging - // writes the exported log entries to the sink's destination. This field is - // set by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and - // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] based on the - // value of `unique_writer_identity` in those methods. - // - // Until you grant this identity write-access to the destination, log entry - // exports from this sink will fail. For more information, - // see [Granting Access for a - // Resource](https://cloud.google.com/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). - // Consult the destination service's documentation to determine the - // appropriate IAM roles to assign to the identity. - string writer_identity = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. This field applies only to sinks owned by organizations and - // folders. If the field is false, the default, only the logs owned by the - // sink's parent resource are available for export. 
If the field is true, then - // logs from all the projects, folders, and billing accounts contained in the - // sink's parent resource are also available for export. Whether a particular - // log entry from the children is exported depends on the sink's filter - // expression. For example, if this field is true, then the filter - // `resource.type=gce_instance` would export all Compute Engine VM instance - // log entries from all projects in the sink's parent. To only export entries - // from certain child projects, filter on the project part of the log name: - // - // logName:("projects/test-project1/" OR "projects/test-project2/") AND - // resource.type=gce_instance - bool include_children = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Destination dependent options. - oneof options { - // Optional. Options that affect sinks exporting data to BigQuery. - BigQueryOptions bigquery_options = 12 [(google.api.field_behavior) = OPTIONAL]; - } - - // Output only. The creation timestamp of the sink. - // - // This field may not be present for older sinks. - google.protobuf.Timestamp create_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the sink. - // - // This field may not be present for older sinks. - google.protobuf.Timestamp update_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Options that change functionality of a sink exporting data to BigQuery. -message BigQueryOptions { - // Optional. Whether to use [BigQuery's partition - // tables](https://cloud.google.com/bigquery/docs/partitioned-tables). By - // default, Logging creates dated tables based on the log entries' timestamps, - // e.g. syslog_20170523. With partitioned tables the date suffix is no longer - // present and [special query - // syntax](https://cloud.google.com/bigquery/docs/querying-partitioned-tables) - // has to be used instead. In both cases, tables are sharded based on UTC - // timezone. 
- bool use_partitioned_tables = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. True if new timestamp column based partitioning is in use, - // false if legacy ingestion-time partitioning is in use. - // All new sinks will have this field set true and will use timestamp column - // based partitioning. If use_partitioned_tables is false, this value has no - // meaning and will be false. Legacy sinks using partitioned tables will have - // this field set to false. - bool uses_timestamp_column_partitioning = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The parameters to `ListBuckets`. -message ListBucketsRequest { - // Required. The parent resource whose buckets are to be listed: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]" - // - // Note: The locations portion of the resource must be specified, but - // supplying the character `-` in place of [LOCATION_ID] will return all - // buckets. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogBucket" - } - ]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The response from ListBuckets. 
-message ListBucketsResponse { - // A list of buckets. - repeated LogBucket buckets = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `CreateBucket`. -message CreateBucketRequest { - // Required. The resource in which to create the bucket: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - // - // Example: `"projects/my-logging-project/locations/global"` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogBucket" - } - ]; - - // Required. A client-assigned identifier such as `"my-bucket"`. Identifiers are - // limited to 100 characters and can include only letters, digits, - // underscores, hyphens, and periods. - string bucket_id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The new bucket. The region specified in the new bucket must be compliant - // with any Location Restriction Org Policy. The name field in the bucket is - // ignored. - LogBucket bucket = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to `UpdateBucket`. -message UpdateBucketRequest { - // Required. The full resource name of the bucket to update. - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. 
Also - // requires permission "resourcemanager.projects.updateLiens" to set the - // locked property - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; - - // Required. The updated bucket. - LogBucket bucket = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. Field mask that specifies the fields in `bucket` that need an update. A - // bucket field will be overwritten if, and only if, it is in the update - // mask. `name` and output only fields cannot be updated. - // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=retention_days`. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to `GetBucket`. -message GetBucketRequest { - // Required. The resource name of the bucket: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; -} - -// The parameters to `DeleteBucket`. -message DeleteBucketRequest { - // Required. The full resource name of the bucket to delete. 
- // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; -} - -// The parameters to `UndeleteBucket`. -message UndeleteBucketRequest { - // Required. The full resource name of the bucket to undelete. - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogBucket" - } - ]; -} - -// The parameters to `ListViews`. -message ListViewsRequest { - // Required. The bucket whose views are to be listed: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - string parent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. 
- // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The response from ListViews. -message ListViewsResponse { - // A list of views. - repeated LogView views = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `CreateView`. -message CreateViewRequest { - // Required. The bucket in which to create the view - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - // - // Example: - // `"projects/my-logging-project/locations/my-location/buckets/my-bucket"` - string parent = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The id to use for this view. - string view_id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The new view. - LogView view = 3 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to `UpdateView`. -message UpdateViewRequest { - // Required. The full resource name of the view to update - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The updated view. - LogView view = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Field mask that specifies the fields in `view` that need - // an update. A field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. 
- // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=filter`. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// The parameters to `GetView`. -message GetViewRequest { - // Required. The resource name of the policy: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogView" - } - ]; -} - -// The parameters to `DeleteView`. -message DeleteViewRequest { - // Required. The full resource name of the view to delete: - // - // "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - // - // Example: - // `"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"`. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogView" - } - ]; -} - -// The parameters to `ListSinks`. -message ListSinksRequest { - // Required. The parent resource whose sinks are to be listed: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogSink" - } - ]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. 
- string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from `ListSinks`. -message ListSinksResponse { - // A list of sinks. - repeated LogSink sinks = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to `GetSink`. -message GetSinkRequest { - // Required. The resource name of the sink: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogSink" - } - ]; -} - -// The parameters to `CreateSink`. -message CreateSinkRequest { - // Required. The resource in which to create the sink: - // - // "projects/[PROJECT_ID]" - // "organizations/[ORGANIZATION_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]" - // "folders/[FOLDER_ID]" - // - // Examples: `"projects/my-logging-project"`, `"organizations/123456789"`. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogSink" - } - ]; - - // Required. The new sink, whose `name` parameter is a sink identifier that - // is not already in use. - LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. 
Determines the kind of IAM identity returned as `writer_identity` - // in the new sink. If this value is omitted or set to false, and if the - // sink's parent is a project, then the value returned as `writer_identity` is - // the same group or service account used by Logging before the addition of - // writer identities to this API. The sink's destination must be in the same - // project as the sink itself. - // - // If this field is set to true, or if the sink is owned by a non-project - // resource such as an organization, then the value of `writer_identity` will - // be a unique service account used only for exports from the new sink. For - // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. - bool unique_writer_identity = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The parameters to `UpdateSink`. -message UpdateSinkRequest { - // Required. The full resource name of the sink to update, including the parent - // resource and the sink identifier: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. - string sink_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogSink" - } - ]; - - // Required. The updated sink, whose name is the same identifier that appears as part - // of `sink_name`. - LogSink sink = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - // for a description of this field. 
When updating a sink, the effect of this - // field on the value of `writer_identity` in the updated sink depends on both - // the old and new values of this field: - // - // + If the old and new values of this field are both false or both true, - // then there is no change to the sink's `writer_identity`. - // + If the old value is false and the new value is true, then - // `writer_identity` is changed to a unique service account. - // + It is an error if the old value is true and the new value is - // set to false or defaulted to false. - bool unique_writer_identity = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Field mask that specifies the fields in `sink` that need - // an update. A sink field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. - // - // An empty updateMask is temporarily treated as using the following mask - // for backwards compatibility purposes: - // destination,filter,includeChildren - // At some point in the future, behavior will be removed and specifying an - // empty updateMask will be an error. - // - // For a detailed `FieldMask` definition, see - // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - // - // Example: `updateMask=filter`. - google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// The parameters to `DeleteSink`. -message DeleteSinkRequest { - // Required. The full resource name of the sink to delete, including the parent - // resource and the sink identifier: - // - // "projects/[PROJECT_ID]/sinks/[SINK_ID]" - // "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - // "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - // "folders/[FOLDER_ID]/sinks/[SINK_ID]" - // - // Example: `"projects/my-project-id/sinks/my-sink-id"`. 
- string sink_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogSink" - } - ]; -} - -// Specifies a set of log entries that are not to be stored in -// Logging. If your GCP resource receives a large volume of logs, you can -// use exclusions to reduce your chargeable logs. Exclusions are -// processed after log sinks, so you can export log entries before they are -// excluded. Note that organization-level and folder-level exclusions don't -// apply to child resources, and that you can't exclude audit log entries. -message LogExclusion { - option (google.api.resource) = { - type: "logging.googleapis.com/LogExclusion" - pattern: "projects/{project}/exclusions/{exclusion}" - pattern: "organizations/{organization}/exclusions/{exclusion}" - pattern: "folders/{folder}/exclusions/{exclusion}" - pattern: "billingAccounts/{billing_account}/exclusions/{exclusion}" - }; - - // Required. A client-assigned identifier, such as `"load-balancer-exclusion"`. - // Identifiers are limited to 100 characters and can include only letters, - // digits, underscores, hyphens, and periods. First character has to be - // alphanumeric. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A description of this exclusion. - string description = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Required. An [advanced logs - // filter](https://cloud.google.com/logging/docs/view/advanced-queries) that - // matches the log entries to be excluded. By using the [sample - // function](https://cloud.google.com/logging/docs/view/advanced-queries#sample), - // you can exclude less than 100% of the matching log entries. - // For example, the following query matches 99% of low-severity log - // entries from Google Cloud Storage buckets: - // - // `"resource.type=gcs_bucket severity=ERROR" - // - // The maximum length of the filter is 20000 characters. 
- string filter = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The metric descriptor associated with the logs-based metric. - // If unspecified, it uses a default metric descriptor with a DELTA metric - // kind, INT64 value type, with no labels and a unit of "1". Such a metric - // counts the number of log entries matching the `filter` expression. - // - // The `name`, `type`, and `description` fields in the `metric_descriptor` - // are output only, and is constructed using the `name` and `description` - // field in the LogMetric. - // - // To create a logs-based metric that records a distribution of log values, a - // DELTA metric kind with a DISTRIBUTION value type must be used along with - // a `value_extractor` expression in the LogMetric. - // - // Each label in the metric descriptor must have a matching label - // name as the key and an extractor expression as the value in the - // `label_extractors` map. - // - // The `metric_kind` and `value_type` fields in the `metric_descriptor` cannot - // be updated once initially configured. New labels can be added in the - // `metric_descriptor`, but existing labels cannot be modified except for - // their description. - google.api.MetricDescriptor metric_descriptor = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A `value_extractor` is required when using a distribution - // logs-based metric to extract the values to record from a log entry. - // Two functions are supported for value extraction: `EXTRACT(field)` or - // `REGEXP_EXTRACT(field, regex)`. The argument are: - // 1. field: The name of the log entry field from which the value is to be - // extracted. - // 2. regex: A regular expression using the Google RE2 syntax - // (https://github.com/google/re2/wiki/Syntax) with a single capture - // group to extract data from the specified log entry field. The value - // of the field is converted to a string before applying the regex. 
- // It is an error to specify a regex that does not include exactly one - // capture group. - // - // The result of the extraction must be convertible to a double type, as the - // distribution always records double values. If either the extraction or - // the conversion to double fails, then those values are not recorded in the - // distribution. - // - // Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")` - string value_extractor = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A map from a label key string to an extractor expression which is - // used to extract data from a log entry field and assign as the label value. - // Each label key specified in the LabelDescriptor must have an associated - // extractor expression in this map. The syntax of the extractor expression - // is the same as for the `value_extractor` field. - // - // The extracted value is converted to the type defined in the label - // descriptor. If the either the extraction or the type conversion fails, - // the label will have a default value. The default value for a string - // label is an empty string, for an integer label its 0, and for a boolean - // label its `false`. - // - // Note that there are upper bounds on the maximum number of labels and the - // number of active time series that are allowed in a project. - map label_extractors = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The `bucket_options` are required when the logs-based metric is - // using a DISTRIBUTION value type and it describes the bucket boundaries - // used to create a histogram of the extracted values. - google.api.Distribution.BucketOptions bucket_options = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The creation timestamp of the metric. - // - // This field may not be present for older metrics. - google.protobuf.Timestamp create_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The last update timestamp of the metric. 
- // - // This field may not be present for older metrics. - google.protobuf.Timestamp update_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Deprecated. The API version that created or updated this metric. - // The v2 format is used by default and cannot be changed. - ApiVersion version = 4 [deprecated = true]; -} - -// The parameters to ListLogMetrics. -message ListLogMetricsRequest { - // Required. The name of the project containing the metrics: - // - // "projects/[PROJECT_ID]" - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - } - ]; - - // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method - // parameters should be identical to those in the previous call. - string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. The presence of `nextPageToken` in the - // response indicates that more results might be available. - int32 page_size = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Result returned from ListLogMetrics. -message ListLogMetricsResponse { - // A list of logs-based metrics. - repeated LogMetric metrics = 1; - - // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call this - // method again using the value of `nextPageToken` as `pageToken`. - string next_page_token = 2; -} - -// The parameters to GetLogMetric. -message GetLogMetricRequest { - // Required. 
The resource name of the desired metric: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogMetric" - } - ]; -} - -// The parameters to CreateLogMetric. -message CreateLogMetricRequest { - // Required. The resource name of the project in which to create the metric: - // - // "projects/[PROJECT_ID]" - // - // The new metric must be provided in the request. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "logging.googleapis.com/LogMetric" - } - ]; - - // Required. The new logs-based metric, which must not have an identifier that - // already exists. - LogMetric metric = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to UpdateLogMetric. -message UpdateLogMetricRequest { - // Required. The resource name of the metric to update: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - // - // The updated metric must be provided in the request and it's - // `name` field must be the same as `[METRIC_ID]` If the metric - // does not exist in `[PROJECT_ID]`, then a new metric is created. - string metric_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogMetric" - } - ]; - - // Required. The updated metric. - LogMetric metric = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The parameters to DeleteLogMetric. -message DeleteLogMetricRequest { - // Required. 
The resource name of the metric to delete: - // - // "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - string metric_name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "logging.googleapis.com/LogMetric" - } - ]; -} diff --git a/google/cloud/logging_v2/services/__init__.py b/google/cloud/logging_v2/services/__init__.py index 42ffdf2bc..4de65971c 100644 --- a/google/cloud/logging_v2/services/__init__.py +++ b/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/logging_v2/services/config_service_v2/__init__.py b/google/cloud/logging_v2/services/config_service_v2/__init__.py index 4ab8f4d40..e7f604280 100644 --- a/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import ConfigServiceV2Client from .async_client import ConfigServiceV2AsyncClient diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index ef184d61c..634c106b6 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,17 +20,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client @@ -60,31 +57,26 @@ class ConfigServiceV2AsyncClient: parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) - common_billing_account_path = staticmethod( ConfigServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( ConfigServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( ConfigServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( ConfigServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( 
ConfigServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) parse_common_project_path = staticmethod( ConfigServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) parse_common_location_path = staticmethod( ConfigServiceV2Client.parse_common_location_path @@ -92,7 +84,8 @@ class ConfigServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -107,7 +100,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -124,7 +117,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ConfigServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: ConfigServiceV2Transport: The transport used by the client instance. @@ -138,12 +131,12 @@ def transport(self) -> ConfigServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the config service v2 client. + """Instantiates the config service v2 client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -175,7 +168,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = ConfigServiceV2Client( credentials=credentials, transport=transport, @@ -215,7 +207,6 @@ async def list_buckets( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -244,7 +235,6 @@ async def list_buckets( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -287,7 +277,6 @@ async def get_bucket( Args: request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): The request object. The parameters to `GetBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -299,7 +288,6 @@ async def get_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -337,7 +325,6 @@ async def create_bucket( Args: request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): The request object. The parameters to `CreateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -349,7 +336,6 @@ async def create_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. 
- request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -395,7 +381,6 @@ async def update_bucket( Args: request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`): The request object. The parameters to `UpdateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -407,7 +392,6 @@ async def update_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -445,7 +429,6 @@ async def delete_bucket( Args: request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): The request object. The parameters to `DeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -453,7 +436,6 @@ async def delete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -489,7 +471,6 @@ async def undelete_bucket( Args: request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): The request object. The parameters to `UndeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -497,7 +478,6 @@ async def undelete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- request = logging_config.UndeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -543,7 +523,6 @@ async def list_views( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -572,7 +551,6 @@ async def list_views( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -615,7 +593,6 @@ async def get_view( Args: request (:class:`google.cloud.logging_v2.types.GetViewRequest`): The request object. The parameters to `GetView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -629,7 +606,6 @@ async def get_view( """ # Create or coerce a protobuf request object. - request = logging_config.GetViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -666,7 +642,6 @@ async def create_view( Args: request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): The request object. The parameters to `CreateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -680,7 +655,6 @@ async def create_view( """ # Create or coerce a protobuf request object. - request = logging_config.CreateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -717,7 +691,6 @@ async def update_view( Args: request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): The request object. The parameters to `UpdateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -731,7 +704,6 @@ async def update_view( """ # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -767,7 +739,6 @@ async def delete_view( Args: request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): The request object. The parameters to `DeleteView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -775,7 +746,6 @@ async def delete_view( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -825,7 +795,6 @@ async def list_sinks( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -854,7 +823,6 @@ async def list_sinks( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -867,9 +835,9 @@ async def list_sinks( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -924,7 +892,6 @@ async def get_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -958,7 +925,6 @@ async def get_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -971,9 +937,9 @@ async def get_sink( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1037,7 +1003,6 @@ async def create_sink( This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1071,7 +1036,6 @@ async def create_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if sink is not None: @@ -1103,7 +1067,7 @@ async def update_sink( *, sink_name: str = None, sink: logging_config.LogSink = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1161,7 +1125,6 @@ async def update_sink( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1195,7 +1158,6 @@ async def update_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if sink_name is not None: request.sink_name = sink_name if sink is not None: @@ -1212,9 +1174,9 @@ async def update_sink( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1267,7 +1229,6 @@ async def delete_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1288,7 +1249,6 @@ async def delete_sink( # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -1301,9 +1261,9 @@ async def delete_sink( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1352,7 +1312,6 @@ async def list_exclusions( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1381,7 +1340,6 @@ async def list_exclusions( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -1394,9 +1352,9 @@ async def list_exclusions( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1452,7 +1410,6 @@ async def get_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1488,7 +1445,6 @@ async def get_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1501,9 +1457,9 @@ async def get_exclusion( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1566,7 +1522,6 @@ async def create_exclusion( This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1602,7 +1557,6 @@ async def create_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent if exclusion is not None: @@ -1634,7 +1588,7 @@ async def update_exclusion( *, name: str = None, exclusion: logging_config.LogExclusion = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1683,7 +1637,6 @@ async def update_exclusion( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1719,7 +1672,6 @@ async def update_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if exclusion is not None: @@ -1778,7 +1730,6 @@ async def delete_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1799,7 +1750,6 @@ async def delete_exclusion( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1812,9 +1762,9 @@ async def delete_exclusion( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1858,7 +1808,6 @@ async def get_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1882,7 +1831,6 @@ async def get_cmek_settings( """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1936,7 +1884,6 @@ async def update_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1960,7 +1907,6 @@ async def update_cmek_settings( """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index 37a28d7a2..d2b323227 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -34,9 +32,8 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport @@ -57,7 +54,7 @@ class ConfigServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[ConfigServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -80,7 +77,8 @@ class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. 
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -114,7 +112,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -131,7 +130,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -150,34 +149,35 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> ConfigServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - ConfigServiceV2Transport: The transport used by the client instance. + ConfigServiceV2Transport: The transport used by the client + instance. 
""" return self._transport @staticmethod def cmek_settings_path(project: str,) -> str: - """Return a fully-qualified cmek_settings string.""" + """Returns a fully-qualified cmek_settings string.""" return "projects/{project}/cmekSettings".format(project=project,) @staticmethod def parse_cmek_settings_path(path: str) -> Dict[str, str]: - """Parse a cmek_settings path into its component segments.""" + """Parses a cmek_settings path into its component segments.""" m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} @staticmethod def log_bucket_path(project: str, location: str, bucket: str,) -> str: - """Return a fully-qualified log_bucket string.""" + """Returns a fully-qualified log_bucket string.""" return "projects/{project}/locations/{location}/buckets/{bucket}".format( project=project, location=location, bucket=bucket, ) @staticmethod def parse_log_bucket_path(path: str) -> Dict[str, str]: - """Parse a log_bucket path into its component segments.""" + """Parses a log_bucket path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path, @@ -186,38 +186,38 @@ def parse_log_bucket_path(path: str) -> Dict[str, str]: @staticmethod def log_exclusion_path(project: str, exclusion: str,) -> str: - """Return a fully-qualified log_exclusion string.""" + """Returns a fully-qualified log_exclusion string.""" return "projects/{project}/exclusions/{exclusion}".format( project=project, exclusion=exclusion, ) @staticmethod def parse_log_exclusion_path(path: str) -> Dict[str, str]: - """Parse a log_exclusion path into its component segments.""" + """Parses a log_exclusion path into its component segments.""" m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def log_sink_path(project: str, sink: str,) -> str: - """Return a fully-qualified log_sink string.""" + """Returns a fully-qualified log_sink string.""" return 
"projects/{project}/sinks/{sink}".format(project=project, sink=sink,) @staticmethod def parse_log_sink_path(path: str) -> Dict[str, str]: - """Parse a log_sink path into its component segments.""" + """Parses a log_sink path into its component segments.""" m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def log_view_path(project: str, location: str, bucket: str, view: str,) -> str: - """Return a fully-qualified log_view string.""" + """Returns a fully-qualified log_view string.""" return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( project=project, location=location, bucket=bucket, view=view, ) @staticmethod def parse_log_view_path(path: str) -> Dict[str, str]: - """Parse a log_view path into its component segments.""" + """Parses a log_view path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path, @@ -226,7 +226,7 @@ def parse_log_view_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -239,7 +239,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -250,7 +250,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -261,7 +261,7 @@ def 
parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -272,7 +272,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -286,12 +286,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ConfigServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the config service v2 client. + """Instantiates the config service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -346,9 +346,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -360,12 +361,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -380,8 +383,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -428,7 +431,6 @@ def list_buckets( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -459,10 +461,8 @@ def list_buckets( # there are no flattened fields. if not isinstance(request, logging_config.ListBucketsRequest): request = logging_config.ListBucketsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -501,7 +501,6 @@ def get_bucket( Args: request (google.cloud.logging_v2.types.GetBucketRequest): The request object. The parameters to `GetBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -513,7 +512,6 @@ def get_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes # in a logging_config.GetBucketRequest. # There's no risk of modifying the input as we've already verified @@ -552,7 +550,6 @@ def create_bucket( Args: request (google.cloud.logging_v2.types.CreateBucketRequest): The request object. The parameters to `CreateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -564,7 +561,6 @@ def create_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.CreateBucketRequest. # There's no risk of modifying the input as we've already verified @@ -611,7 +607,6 @@ def update_bucket( Args: request (google.cloud.logging_v2.types.UpdateBucketRequest): The request object. The parameters to `UpdateBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -623,7 +618,6 @@ def update_bucket( Describes a repository of logs. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateBucketRequest. # There's no risk of modifying the input as we've already verified @@ -662,7 +656,6 @@ def delete_bucket( Args: request (google.cloud.logging_v2.types.DeleteBucketRequest): The request object. The parameters to `DeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -670,7 +663,6 @@ def delete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.DeleteBucketRequest. 
# There's no risk of modifying the input as we've already verified @@ -707,7 +699,6 @@ def undelete_bucket( Args: request (google.cloud.logging_v2.types.UndeleteBucketRequest): The request object. The parameters to `UndeleteBucket`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -715,7 +706,6 @@ def undelete_bucket( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.UndeleteBucketRequest. # There's no risk of modifying the input as we've already verified @@ -762,7 +752,6 @@ def list_views( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -793,10 +782,8 @@ def list_views( # there are no flattened fields. if not isinstance(request, logging_config.ListViewsRequest): request = logging_config.ListViewsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -835,7 +822,6 @@ def get_view( Args: request (google.cloud.logging_v2.types.GetViewRequest): The request object. The parameters to `GetView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -849,7 +835,6 @@ def get_view( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.GetViewRequest. # There's no risk of modifying the input as we've already verified @@ -887,7 +872,6 @@ def create_view( Args: request (google.cloud.logging_v2.types.CreateViewRequest): The request object. The parameters to `CreateView`. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -901,7 +885,6 @@ def create_view( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.CreateViewRequest. # There's no risk of modifying the input as we've already verified @@ -939,7 +922,6 @@ def update_view( Args: request (google.cloud.logging_v2.types.UpdateViewRequest): The request object. The parameters to `UpdateView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -953,7 +935,6 @@ def update_view( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateViewRequest. # There's no risk of modifying the input as we've already verified @@ -990,7 +971,6 @@ def delete_view( Args: request (google.cloud.logging_v2.types.DeleteViewRequest): The request object. The parameters to `DeleteView`. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -998,7 +978,6 @@ def delete_view( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.DeleteViewRequest. # There's no risk of modifying the input as we've already verified @@ -1049,7 +1028,6 @@ def list_sinks( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1080,10 +1058,8 @@ def list_sinks( # there are no flattened fields. 
if not isinstance(request, logging_config.ListSinksRequest): request = logging_config.ListSinksRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1138,7 +1114,6 @@ def get_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1174,10 +1149,8 @@ def get_sink( # there are no flattened fields. if not isinstance(request, logging_config.GetSinkRequest): request = logging_config.GetSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -1241,7 +1214,6 @@ def create_sink( This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1277,10 +1249,8 @@ def create_sink( # there are no flattened fields. if not isinstance(request, logging_config.CreateSinkRequest): request = logging_config.CreateSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if sink is not None: @@ -1308,7 +1278,7 @@ def update_sink( *, sink_name: str = None, sink: logging_config.LogSink = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1366,7 +1336,6 @@ def update_sink( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1402,10 +1371,8 @@ def update_sink( # there are no flattened fields. if not isinstance(request, logging_config.UpdateSinkRequest): request = logging_config.UpdateSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name if sink is not None: @@ -1462,7 +1429,6 @@ def delete_sink( This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1485,10 +1451,8 @@ def delete_sink( # there are no flattened fields. if not isinstance(request, logging_config.DeleteSinkRequest): request = logging_config.DeleteSinkRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if sink_name is not None: request.sink_name = sink_name @@ -1537,7 +1501,6 @@ def list_exclusions( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1568,10 +1531,8 @@ def list_exclusions( # there are no flattened fields. if not isinstance(request, logging_config.ListExclusionsRequest): request = logging_config.ListExclusionsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -1627,7 +1588,6 @@ def get_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1665,10 +1625,8 @@ def get_exclusion( # there are no flattened fields. if not isinstance(request, logging_config.GetExclusionRequest): request = logging_config.GetExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -1731,7 +1689,6 @@ def create_exclusion( This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1769,10 +1726,8 @@ def create_exclusion( # there are no flattened fields. if not isinstance(request, logging_config.CreateExclusionRequest): request = logging_config.CreateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if exclusion is not None: @@ -1800,7 +1755,7 @@ def update_exclusion( *, name: str = None, exclusion: logging_config.LogExclusion = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -1849,7 +1804,6 @@ def update_exclusion( This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1887,10 +1841,8 @@ def update_exclusion( # there are no flattened fields. 
if not isinstance(request, logging_config.UpdateExclusionRequest): request = logging_config.UpdateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if exclusion is not None: @@ -1945,7 +1897,6 @@ def delete_exclusion( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1968,10 +1919,8 @@ def delete_exclusion( # there are no flattened fields. if not isinstance(request, logging_config.DeleteExclusionRequest): request = logging_config.DeleteExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -2015,7 +1964,6 @@ def get_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2039,7 +1987,6 @@ def get_cmek_settings( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging_config.GetCmekSettingsRequest. # There's no risk of modifying the input as we've already verified @@ -2094,7 +2041,6 @@ def update_cmek_settings( See [Enabling CMEK for Logs Router](https://cloud.google.com/logging/docs/routing/managed- encryption) for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2118,7 +2064,6 @@ def update_cmek_settings( """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes # in a logging_config.UpdateCmekSettingsRequest. # There's no risk of modifying the input as we've already verified diff --git a/google/cloud/logging_v2/services/config_service_v2/pagers.py b/google/cloud/logging_v2/services/config_service_v2/pagers.py index f656fef0d..6d8e11fb8 100644 --- a/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -501,7 +499,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 30282e2d2..b1e24fc64 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 3981d8e9f..d52c97635 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -47,21 +57,24 @@ class ConfigServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.read", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + 
credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -70,7 +83,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,29 +97,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. 
self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -150,9 +210,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -166,9 +226,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -185,9 +245,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -201,9 +261,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -217,9 +277,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -233,9 +293,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - 
exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -255,9 +315,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,11 +337,11 @@ def _prep_wrapped_messages(self, client_info): @property def list_buckets( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListBucketsRequest], - typing.Union[ + Union[ logging_config.ListBucketsResponse, - typing.Awaitable[logging_config.ListBucketsResponse], + Awaitable[logging_config.ListBucketsResponse], ], ]: raise NotImplementedError() @@ -289,62 +349,56 @@ def list_buckets( @property def get_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def create_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def update_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateBucketRequest], - typing.Union[ - logging_config.LogBucket, typing.Awaitable[logging_config.LogBucket] - ], + Union[logging_config.LogBucket, Awaitable[logging_config.LogBucket]], ]: raise NotImplementedError() @property def delete_bucket( self, - ) -> typing.Callable[ + 
) -> Callable[ [logging_config.DeleteBucketRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def undelete_bucket( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UndeleteBucketRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_views( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListViewsRequest], - typing.Union[ + Union[ logging_config.ListViewsResponse, - typing.Awaitable[logging_config.ListViewsResponse], + Awaitable[logging_config.ListViewsResponse], ], ]: raise NotImplementedError() @@ -352,47 +406,47 @@ def list_views( @property def get_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def create_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def update_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateViewRequest], - typing.Union[logging_config.LogView, typing.Awaitable[logging_config.LogView]], + Union[logging_config.LogView, Awaitable[logging_config.LogView]], ]: raise NotImplementedError() @property def delete_view( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteViewRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_sinks( self, - ) -> typing.Callable[ + ) -> Callable[ 
[logging_config.ListSinksRequest], - typing.Union[ + Union[ logging_config.ListSinksResponse, - typing.Awaitable[logging_config.ListSinksResponse], + Awaitable[logging_config.ListSinksResponse], ], ]: raise NotImplementedError() @@ -400,47 +454,47 @@ def list_sinks( @property def get_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def create_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def update_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateSinkRequest], - typing.Union[logging_config.LogSink, typing.Awaitable[logging_config.LogSink]], + Union[logging_config.LogSink, Awaitable[logging_config.LogSink]], ]: raise NotImplementedError() @property def delete_sink( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteSinkRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def list_exclusions( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.ListExclusionsRequest], - typing.Union[ + Union[ logging_config.ListExclusionsResponse, - typing.Awaitable[logging_config.ListExclusionsResponse], + Awaitable[logging_config.ListExclusionsResponse], ], ]: raise NotImplementedError() @@ -448,64 +502,54 @@ def list_exclusions( @property def get_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + 
Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def create_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.CreateExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def update_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateExclusionRequest], - typing.Union[ - logging_config.LogExclusion, typing.Awaitable[logging_config.LogExclusion] - ], + Union[logging_config.LogExclusion, Awaitable[logging_config.LogExclusion]], ]: raise NotImplementedError() @property def delete_exclusion( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.DeleteExclusionRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_cmek_settings( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.GetCmekSettingsRequest], - typing.Union[ - logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] - ], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], ]: raise NotImplementedError() @property def update_cmek_settings( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_config.UpdateCmekSettingsRequest], - typing.Union[ - logging_config.CmekSettings, typing.Awaitable[logging_config.CmekSettings] - ], + Union[logging_config.CmekSettings, Awaitable[logging_config.CmekSettings]], ]: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 67f2ea705..327cc79c3 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ 
b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -343,7 +343,7 @@ def update_bucket( @property def delete_bucket( self, - ) -> Callable[[logging_config.DeleteBucketRequest], empty.Empty]: + ) -> Callable[[logging_config.DeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -364,14 +364,14 @@ def delete_bucket( self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_bucket"] @property def undelete_bucket( self, - ) -> Callable[[logging_config.UndeleteBucketRequest], empty.Empty]: + ) -> Callable[[logging_config.UndeleteBucketRequest], empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a bucket. 
A bucket that has been deleted @@ -391,7 +391,7 @@ def undelete_bucket( self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["undelete_bucket"] @@ -502,7 +502,9 @@ def update_view( return self._stubs["update_view"] @property - def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empty]: + def delete_view( + self, + ) -> Callable[[logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. Deletes a view from a bucket. @@ -521,7 +523,7 @@ def delete_view(self) -> Callable[[logging_config.DeleteViewRequest], empty.Empt self._stubs["delete_view"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_view"] @@ -639,7 +641,9 @@ def update_sink( return self._stubs["update_sink"] @property - def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empty]: + def delete_sink( + self, + ) -> Callable[[logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. 
If the sink has a unique ``writer_identity``, @@ -659,7 +663,7 @@ def delete_sink(self) -> Callable[[logging_config.DeleteSinkRequest], empty.Empt self._stubs["delete_sink"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] @@ -776,7 +780,7 @@ def update_exclusion( @property def delete_exclusion( self, - ) -> Callable[[logging_config.DeleteExclusionRequest], empty.Empty]: + ) -> Callable[[logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion. @@ -795,7 +799,7 @@ def delete_exclusion( self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_exclusion"] diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 3eabb2bcd..3d7d271bf 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -54,7 +51,7 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -354,7 +353,7 @@ def update_bucket( @property def delete_bucket( self, - ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. Deletes a bucket. 
Moves the bucket to the DELETE_REQUESTED @@ -375,14 +374,14 @@ def delete_bucket( self._stubs["delete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteBucket", request_serializer=logging_config.DeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_bucket"] @property def undelete_bucket( self, - ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.UndeleteBucketRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. Undeletes a bucket. A bucket that has been deleted @@ -402,7 +401,7 @@ def undelete_bucket( self._stubs["undelete_bucket"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/UndeleteBucket", request_serializer=logging_config.UndeleteBucketRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["undelete_bucket"] @@ -521,7 +520,7 @@ def update_view( @property def delete_view( self, - ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. Deletes a view from a bucket. 
@@ -540,7 +539,7 @@ def delete_view( self._stubs["delete_view"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteView", request_serializer=logging_config.DeleteViewRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_view"] @@ -666,7 +665,7 @@ def update_sink( @property def delete_sink( self, - ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. Deletes a sink. If the sink has a unique ``writer_identity``, @@ -686,7 +685,7 @@ def delete_sink( self._stubs["delete_sink"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteSink", request_serializer=logging_config.DeleteSinkRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_sink"] @@ -810,7 +809,7 @@ def update_exclusion( @property def delete_exclusion( self, - ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. Deletes an exclusion. 
@@ -829,7 +828,7 @@ def delete_exclusion( self._stubs["delete_exclusion"] = self.grpc_channel.unary_unary( "/google.logging.v2.ConfigServiceV2/DeleteExclusion", request_serializer=logging_config.DeleteExclusionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_exclusion"] diff --git a/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/__init__.py index c46b48a29..bd7a79820 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import LoggingServiceV2Client from .async_client import LoggingServiceV2AsyncClient diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f61556922..6a11e96cb 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -31,17 +29,16 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging - from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client @@ -57,31 +54,26 @@ class LoggingServiceV2AsyncClient: log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) - common_billing_account_path = staticmethod( LoggingServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( LoggingServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( LoggingServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( LoggingServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( LoggingServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) parse_common_project_path = staticmethod( 
LoggingServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) parse_common_location_path = staticmethod( LoggingServiceV2Client.parse_common_location_path @@ -89,7 +81,8 @@ class LoggingServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -104,7 +97,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -121,7 +114,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> LoggingServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: LoggingServiceV2Transport: The transport used by the client instance. @@ -135,12 +128,12 @@ def transport(self) -> LoggingServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the logging service v2 client. + """Instantiates the logging service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -172,7 +165,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. 
""" - self._client = LoggingServiceV2Client( credentials=credentials, transport=transport, @@ -217,7 +209,6 @@ async def delete_log( This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -238,7 +229,6 @@ async def delete_log( # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name @@ -251,9 +241,9 @@ async def delete_log( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -277,7 +267,7 @@ async def write_log_entries( request: logging.WriteLogEntriesRequest = None, *, log_name: str = None, - resource: monitored_resource.MonitoredResource = None, + resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -383,7 +373,6 @@ async def write_log_entries( This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -408,7 +397,6 @@ async def write_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if log_name is not None: request.log_name = log_name if resource is not None: @@ -416,7 +404,6 @@ async def write_log_entries( if labels: request.labels.update(labels) - if entries: request.entries.extend(entries) @@ -429,9 +416,9 @@ async def write_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -514,7 +501,6 @@ async def list_log_entries( This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -543,12 +529,10 @@ async def list_log_entries( # If we have keyword arguments corresponding to fields on the # request, apply these. - if filter is not None: request.filter = filter if order_by is not None: request.order_by = order_by - if resource_names: request.resource_names.extend(resource_names) @@ -561,9 +545,9 @@ async def list_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -598,7 +582,6 @@ async def list_monitored_resource_descriptors( request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -615,7 +598,6 @@ async def list_monitored_resource_descriptors( """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -627,9 +609,9 @@ async def list_monitored_resource_descriptors( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -678,7 +660,6 @@ async def list_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -707,7 +688,6 @@ async def list_logs( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -720,9 +700,9 @@ async def list_logs( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -783,9 +763,9 @@ def tail_log_entries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 00d758ab5..dd94b6721 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -33,20 +31,19 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging - from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -67,7 +64,7 @@ class LoggingServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[LoggingServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -90,7 +87,8 @@ class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. 
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -124,7 +122,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -141,7 +140,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -160,27 +159,28 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> LoggingServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - LoggingServiceV2Transport: The transport used by the client instance. + LoggingServiceV2Transport: The transport used by the client + instance. 
""" return self._transport @staticmethod def log_path(project: str, log: str,) -> str: - """Return a fully-qualified log string.""" + """Returns a fully-qualified log string.""" return "projects/{project}/logs/{log}".format(project=project, log=log,) @staticmethod def parse_log_path(path: str) -> Dict[str, str]: - """Parse a log path into its component segments.""" + """Parses a log path into its component segments.""" m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -193,7 +193,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -204,7 +204,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -215,7 +215,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -226,7 +226,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified 
location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -240,12 +240,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the logging service v2 client. + """Instantiates the logging service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -300,9 +300,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -314,12 +315,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -334,8 +337,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." 
) self._transport = transport else: @@ -387,7 +390,6 @@ def delete_log( This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -410,10 +412,8 @@ def delete_log( # there are no flattened fields. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name @@ -437,7 +437,7 @@ def write_log_entries( request: logging.WriteLogEntriesRequest = None, *, log_name: str = None, - resource: monitored_resource.MonitoredResource = None, + resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -543,7 +543,6 @@ def write_log_entries( This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -570,10 +569,8 @@ def write_log_entries( # there are no flattened fields. if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if log_name is not None: request.log_name = log_name if resource is not None: @@ -662,7 +659,6 @@ def list_log_entries( This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -693,10 +689,8 @@ def list_log_entries( # there are no flattened fields. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if resource_names is not None: request.resource_names = resource_names if filter is not None: @@ -735,7 +729,6 @@ def list_monitored_resource_descriptors( request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -752,7 +745,6 @@ def list_monitored_resource_descriptors( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a logging.ListMonitoredResourceDescriptorsRequest. # There's no risk of modifying the input as we've already verified @@ -807,7 +799,6 @@ def list_logs( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -838,10 +829,8 @@ def list_logs( # there are no flattened fields. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent diff --git a/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 7ab8ac8d2..b06007cb4 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -26,7 +24,7 @@ Optional, ) -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging @@ -119,7 +117,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -213,7 +211,7 @@ def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[monitored_resource.MonitoredResourceDescriptor]: + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: for page in self.pages: yield from page.resource_descriptors @@ -249,7 +247,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and @@ -281,7 +279,7 @@ async def pages( def __aiter__( self, - ) -> AsyncIterable[monitored_resource.MonitoredResourceDescriptor]: + ) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -381,7 +379,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index cd979b771..65e713121 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 66003ef95..fdcbead00 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -48,21 +58,24 @@ class LoggingServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.write", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: 
Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -71,7 +84,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -85,29 +98,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. 
self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -118,9 +178,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -134,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -150,9 +210,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -166,9 +226,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -182,9 +242,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -198,9 +258,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - 
exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), @@ -212,20 +272,18 @@ def _prep_wrapped_messages(self, client_info): @property def delete_log( self, - ) -> typing.Callable[ - [logging.DeleteLogRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [logging.DeleteLogRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def write_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.WriteLogEntriesRequest], - typing.Union[ - logging.WriteLogEntriesResponse, - typing.Awaitable[logging.WriteLogEntriesResponse], + Union[ + logging.WriteLogEntriesResponse, Awaitable[logging.WriteLogEntriesResponse] ], ]: raise NotImplementedError() @@ -233,11 +291,10 @@ def write_log_entries( @property def list_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListLogEntriesRequest], - typing.Union[ - logging.ListLogEntriesResponse, - typing.Awaitable[logging.ListLogEntriesResponse], + Union[ + logging.ListLogEntriesResponse, Awaitable[logging.ListLogEntriesResponse] ], ]: raise NotImplementedError() @@ -245,11 +302,11 @@ def list_log_entries( @property def list_monitored_resource_descriptors( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListMonitoredResourceDescriptorsRequest], - typing.Union[ + Union[ logging.ListMonitoredResourceDescriptorsResponse, - typing.Awaitable[logging.ListMonitoredResourceDescriptorsResponse], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse], ], ]: raise NotImplementedError() @@ -257,22 +314,19 @@ def list_monitored_resource_descriptors( @property def list_logs( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.ListLogsRequest], - typing.Union[ - logging.ListLogsResponse, typing.Awaitable[logging.ListLogsResponse] - ], + 
Union[logging.ListLogsResponse, Awaitable[logging.ListLogsResponse]], ]: raise NotImplementedError() @property def tail_log_entries( self, - ) -> typing.Callable[ + ) -> Callable[ [logging.TailLogEntriesRequest], - typing.Union[ - logging.TailLogEntriesResponse, - typing.Awaitable[logging.TailLogEntriesResponse], + Union[ + logging.TailLogEntriesResponse, Awaitable[logging.TailLogEntriesResponse] ], ]: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index b52d306f3..5e5c1ad0c 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -223,7 +223,7 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]: + def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log. The log @@ -246,7 +246,7 @@ def delete_log(self) -> Callable[[logging.DeleteLogRequest], empty.Empty]: self._stubs["delete_log"] = self.grpc_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log"] diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 0ba87029c..1f33ad78a 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport @@ -54,7 +51,7 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -229,7 +228,7 @@ def grpc_channel(self) -> aio.Channel: @property def delete_log( self, - ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging.DeleteLogRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. Deletes all the log entries in a log. 
The log @@ -252,7 +251,7 @@ def delete_log( self._stubs["delete_log"] = self.grpc_channel.unary_unary( "/google.logging.v2.LoggingServiceV2/DeleteLog", request_serializer=logging.DeleteLogRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log"] diff --git a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index c857ea037..f37e39314 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import MetricsServiceV2Client from .async_client import MetricsServiceV2AsyncClient diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 93a652b79..defd64a13 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,19 +20,17 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client @@ -50,31 +46,26 @@ class MetricsServiceV2AsyncClient: log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) - common_billing_account_path = staticmethod( MetricsServiceV2Client.common_billing_account_path ) parse_common_billing_account_path = staticmethod( MetricsServiceV2Client.parse_common_billing_account_path ) - common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) parse_common_folder_path = staticmethod( MetricsServiceV2Client.parse_common_folder_path ) - common_organization_path = staticmethod( 
MetricsServiceV2Client.common_organization_path ) parse_common_organization_path = staticmethod( MetricsServiceV2Client.parse_common_organization_path ) - common_project_path = staticmethod(MetricsServiceV2Client.common_project_path) parse_common_project_path = staticmethod( MetricsServiceV2Client.parse_common_project_path ) - common_location_path = staticmethod(MetricsServiceV2Client.common_location_path) parse_common_location_path = staticmethod( MetricsServiceV2Client.parse_common_location_path @@ -82,7 +73,8 @@ class MetricsServiceV2AsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -97,7 +89,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -114,7 +106,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> MetricsServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: MetricsServiceV2Transport: The transport used by the client instance. @@ -128,12 +120,12 @@ def transport(self) -> MetricsServiceV2Transport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the metrics service v2 client. + """Instantiates the metrics service v2 client. 
Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -165,7 +157,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = MetricsServiceV2Client( credentials=credentials, transport=transport, @@ -198,7 +189,6 @@ async def list_log_metrics( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -227,7 +217,6 @@ async def list_log_metrics( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -240,9 +229,9 @@ async def list_log_metrics( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -292,7 +281,6 @@ async def get_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -328,7 +316,6 @@ async def get_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if metric_name is not None: request.metric_name = metric_name @@ -341,9 +328,9 @@ async def get_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -401,7 +388,6 @@ async def create_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -437,7 +423,6 @@ async def create_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metric is not None: @@ -498,7 +483,6 @@ async def update_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -534,7 +518,6 @@ async def update_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name if metric is not None: @@ -549,9 +532,9 @@ async def update_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -597,7 +580,6 @@ async def delete_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -618,7 +600,6 @@ async def delete_log_metric( # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -631,9 +612,9 @@ async def delete_log_metric( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index 850236a57..6dcbcdfb3 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,22 +21,20 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -59,7 +55,7 @@ class MetricsServiceV2ClientMeta(type): _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[MetricsServiceV2Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -82,7 +78,8 @@ class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -116,7 +113,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -133,7 +131,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -152,29 +150,30 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> MetricsServiceV2Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - MetricsServiceV2Transport: The transport used by the client instance. + MetricsServiceV2Transport: The transport used by the client + instance. 
""" return self._transport @staticmethod def log_metric_path(project: str, metric: str,) -> str: - """Return a fully-qualified log_metric string.""" + """Returns a fully-qualified log_metric string.""" return "projects/{project}/metrics/{metric}".format( project=project, metric=metric, ) @staticmethod def parse_log_metric_path(path: str) -> Dict[str, str]: - """Parse a log_metric path into its component segments.""" + """Parses a log_metric path into its component segments.""" m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -187,7 +186,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -198,7 +197,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -209,7 +208,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -220,7 +219,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a 
fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -234,12 +233,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the metrics service v2 client. + """Instantiates the metrics service v2 client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -294,9 +293,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -308,12 +308,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -328,8 +330,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." 
+ "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -369,7 +371,6 @@ def list_log_metrics( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -400,10 +401,8 @@ def list_log_metrics( # there are no flattened fields. if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -453,7 +452,6 @@ def get_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -491,10 +489,8 @@ def get_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name @@ -552,7 +548,6 @@ def create_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -590,10 +585,8 @@ def create_log_metric( # there are no flattened fields. 
if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if metric is not None: @@ -650,7 +643,6 @@ def update_log_metric( This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -688,10 +680,8 @@ def update_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if metric_name is not None: request.metric_name = metric_name if metric is not None: @@ -739,7 +729,6 @@ def delete_log_metric( This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -762,10 +751,8 @@ def delete_log_metric( # there are no flattened fields. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if metric_name is not None: request.metric_name = metric_name diff --git a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 15134ac57..8ff178d24 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index f748403b4..10ccb830c 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from typing import Dict, Type diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index c6ae3da41..814f62590 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -36,6 +35,17 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = 
None + +_API_CORE_VERSION = google.api_core.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -48,21 +58,24 @@ class MetricsServiceV2Transport(abc.ABC): "https://www.googleapis.com/auth/logging.write", ) + DEFAULT_HOST: str = "logging.googleapis.com" + def __init__( self, *, - host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -71,7 +84,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -85,29 +98,76 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. self._scopes = scopes or self.AUTH_SCOPES # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -118,9 +178,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -134,9 +194,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -153,9 +213,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -169,9 +229,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ServiceUnavailable, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -183,11 +243,11 @@ def _prep_wrapped_messages(self, client_info): @property def list_log_metrics( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.ListLogMetricsRequest], - typing.Union[ + Union[ logging_metrics.ListLogMetricsResponse, - typing.Awaitable[logging_metrics.ListLogMetricsResponse], + Awaitable[logging_metrics.ListLogMetricsResponse], ], ]: raise NotImplementedError() @@ -195,42 +255,36 @@ def list_log_metrics( @property def get_log_metric( self, - ) -> typing.Callable[ + ) 
-> Callable[ [logging_metrics.GetLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def create_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.CreateLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def update_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.UpdateLogMetricRequest], - typing.Union[ - logging_metrics.LogMetric, typing.Awaitable[logging_metrics.LogMetric] - ], + Union[logging_metrics.LogMetric, Awaitable[logging_metrics.LogMetric]], ]: raise NotImplementedError() @property def delete_log_metric( self, - ) -> typing.Callable[ + ) -> Callable[ [logging_metrics.DeleteLogMetricRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index a9447ac26..1c9b3dde9 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,7 +48,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -65,7 +62,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -175,7 +173,7 @@ def __init__( def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -206,13 +204,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -331,7 +331,7 @@ def update_log_metric( @property def delete_log_metric( self, - ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty.Empty]: + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. @@ -350,7 +350,7 @@ def delete_log_metric( self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 94017be9d..62a0bf0f8 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,22 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 as empty # type: ignore - +from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport @@ -54,7 +51,7 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): def create_channel( cls, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,13 +78,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -95,7 +94,7 @@ def __init__( self, *, host: str = "logging.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -109,7 +108,8 @@ def __init__( """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -167,7 +167,6 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -342,7 +341,7 @@ def update_log_metric( @property def delete_log_metric( self, - ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty.Empty]]: + ) -> Callable[[logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. Deletes a logs-based metric. 
@@ -361,7 +360,7 @@ def delete_log_metric( self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary( "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_log_metric"] diff --git a/google/cloud/logging_v2/types/__init__.py b/google/cloud/logging_v2/types/__init__.py index 9519c0777..7d1cdd99e 100644 --- a/google/cloud/logging_v2/types/__init__.py +++ b/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .log_entry import ( LogEntry, LogEntryOperation, diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index e63d6086f..6c57b22d5 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore -from google.logging.type import http_request_pb2 as glt_http_request # type: ignore -from google.logging.type import log_severity_pb2 as log_severity # type: ignore -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as gp_timestamp # type: ignore +from google.api import monitored_resource_pb2 # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -34,7 +31,6 @@ class LogEntry(proto.Message): r"""An individual entry in a log. - Attributes: log_name (str): Required. The resource name of the log to which this log @@ -163,46 +159,31 @@ class LogEntry(proto.Message): associated with the log entry, if any. 
""" - log_name = proto.Field(proto.STRING, number=12) - + log_name = proto.Field(proto.STRING, number=12,) resource = proto.Field( - proto.MESSAGE, number=8, message=monitored_resource.MonitoredResource, + proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, ) - proto_payload = proto.Field( - proto.MESSAGE, number=2, oneof="payload", message=gp_any.Any, + proto.MESSAGE, number=2, oneof="payload", message=any_pb2.Any, ) - - text_payload = proto.Field(proto.STRING, number=3, oneof="payload") - + text_payload = proto.Field(proto.STRING, number=3, oneof="payload",) json_payload = proto.Field( - proto.MESSAGE, number=6, oneof="payload", message=struct.Struct, + proto.MESSAGE, number=6, oneof="payload", message=struct_pb2.Struct, ) - - timestamp = proto.Field(proto.MESSAGE, number=9, message=gp_timestamp.Timestamp,) - + timestamp = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) receive_timestamp = proto.Field( - proto.MESSAGE, number=24, message=gp_timestamp.Timestamp, + proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, ) - - severity = proto.Field(proto.ENUM, number=10, enum=log_severity.LogSeverity,) - - insert_id = proto.Field(proto.STRING, number=4) - + severity = proto.Field(proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity,) + insert_id = proto.Field(proto.STRING, number=4,) http_request = proto.Field( - proto.MESSAGE, number=7, message=glt_http_request.HttpRequest, + proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=11) - + labels = proto.MapField(proto.STRING, proto.STRING, number=11,) operation = proto.Field(proto.MESSAGE, number=15, message="LogEntryOperation",) - - trace = proto.Field(proto.STRING, number=22) - - span_id = proto.Field(proto.STRING, number=27) - - trace_sampled = proto.Field(proto.BOOL, number=30) - + trace = proto.Field(proto.STRING, number=22,) + span_id = proto.Field(proto.STRING, number=27,) + 
trace_sampled = proto.Field(proto.BOOL, number=30,) source_location = proto.Field( proto.MESSAGE, number=23, message="LogEntrySourceLocation", ) @@ -230,13 +211,10 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field(proto.STRING, number=1) - - producer = proto.Field(proto.STRING, number=2) - - first = proto.Field(proto.BOOL, number=3) - - last = proto.Field(proto.BOOL, number=4) + id = proto.Field(proto.STRING, number=1,) + producer = proto.Field(proto.STRING, number=2,) + first = proto.Field(proto.BOOL, number=3,) + last = proto.Field(proto.BOOL, number=4,) class LogEntrySourceLocation(proto.Message): @@ -261,11 +239,9 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). """ - file = proto.Field(proto.STRING, number=1) - - line = proto.Field(proto.INT64, number=2) - - function = proto.Field(proto.STRING, number=3) + file = proto.Field(proto.STRING, number=1,) + line = proto.Field(proto.INT64, number=2,) + function = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging.py b/google/cloud/logging_v2/types/logging.py index ca739c02c..6d64b9a91 100644 --- a/google/cloud/logging_v2/types/logging.py +++ b/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore +from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry -from google.protobuf import duration_pb2 as duration # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore __protobuf__ = proto.module( @@ -45,7 +42,6 @@ class DeleteLogRequest(proto.Message): r"""The parameters to DeleteLog. - Attributes: log_name (str): Required. The resource name of the log to delete: @@ -64,12 +60,11 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field(proto.STRING, number=1) + log_name = proto.Field(proto.STRING, number=1,) class WriteLogEntriesRequest(proto.Message): r"""The parameters to WriteLogEntries. - Attributes: log_name (str): Optional. A default log resource name that is assigned to @@ -158,28 +153,22 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. """ - log_name = proto.Field(proto.STRING, number=1) - + log_name = proto.Field(proto.STRING, number=1,) resource = proto.Field( - proto.MESSAGE, number=2, message=monitored_resource.MonitoredResource, + proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) - + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) entries = proto.RepeatedField(proto.MESSAGE, number=4, message=log_entry.LogEntry,) - - partial_success = proto.Field(proto.BOOL, number=5) - - dry_run = proto.Field(proto.BOOL, number=6) + partial_success = proto.Field(proto.BOOL, number=5,) + dry_run = proto.Field(proto.BOOL, number=6,) class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries.""" + r"""Result returned from WriteLogEntries. 
""" class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. - Attributes: log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): When ``WriteLogEntriesRequest.partial_success`` is true, @@ -192,13 +181,12 @@ class WriteLogEntriesPartialErrors(proto.Message): """ log_entry_errors = proto.MapField( - proto.INT32, proto.MESSAGE, number=1, message=status.Status, + proto.INT32, proto.MESSAGE, number=1, message=status_pb2.Status, ) class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. - Attributes: resource_names (Sequence[str]): Required. Names of one or more parent resources from which @@ -252,20 +240,15 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. """ - resource_names = proto.RepeatedField(proto.STRING, number=8) - - filter = proto.Field(proto.STRING, number=2) - - order_by = proto.Field(proto.STRING, number=3) - - page_size = proto.Field(proto.INT32, number=4) - - page_token = proto.Field(proto.STRING, number=5) + resource_names = proto.RepeatedField(proto.STRING, number=8,) + filter = proto.Field(proto.STRING, number=2,) + order_by = proto.Field(proto.STRING, number=3,) + page_size = proto.Field(proto.INT32, number=4,) + page_token = proto.Field(proto.STRING, number=5,) class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. - Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. 
If ``entries`` is empty, @@ -293,13 +276,11 @@ def raw_page(self): return self entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListMonitoredResourceDescriptorsRequest(proto.Message): r"""The parameters to ListMonitoredResourceDescriptors - Attributes: page_size (int): Optional. The maximum number of results to return from this @@ -314,14 +295,12 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. """ - page_size = proto.Field(proto.INT32, number=1) - - page_token = proto.Field(proto.STRING, number=2) + page_size = proto.Field(proto.INT32, number=1,) + page_token = proto.Field(proto.STRING, number=2,) class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. - Attributes: resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. @@ -337,15 +316,15 @@ def raw_page(self): return self resource_descriptors = proto.RepeatedField( - proto.MESSAGE, number=1, message=monitored_resource.MonitoredResourceDescriptor, + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListLogsRequest(proto.Message): r"""The parameters to ListLogs. - Attributes: parent (str): Required. The resource name that owns the logs: @@ -379,18 +358,14 @@ class ListLogsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". 
""" - parent = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) - - resource_names = proto.RepeatedField(proto.STRING, number=8) + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + resource_names = proto.RepeatedField(proto.STRING, number=8,) class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. - Attributes: log_names (Sequence[str]): A list of log names. For example, @@ -407,14 +382,12 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField(proto.STRING, number=3) - - next_page_token = proto.Field(proto.STRING, number=2) + log_names = proto.RepeatedField(proto.STRING, number=3,) + next_page_token = proto.Field(proto.STRING, number=2,) class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. - Attributes: resource_names (Sequence[str]): Required. Name of a parent resource from which to retrieve @@ -451,16 +424,13 @@ class TailLogEntriesRequest(proto.Message): milliseconds. """ - resource_names = proto.RepeatedField(proto.STRING, number=1) - - filter = proto.Field(proto.STRING, number=2) - - buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration.Duration,) + resource_names = proto.RepeatedField(proto.STRING, number=1,) + filter = proto.Field(proto.STRING, number=2,) + buffer_window = proto.Field(proto.MESSAGE, number=3, message=duration_pb2.Duration,) class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. - Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will @@ -480,7 +450,6 @@ class TailLogEntriesResponse(proto.Message): class SuppressionInfo(proto.Message): r"""Information about entries that were omitted from the session. 
- Attributes: reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): The reason that entries were omitted from the @@ -499,11 +468,9 @@ class Reason(proto.Enum): reason = proto.Field( proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) - - suppressed_count = proto.Field(proto.INT32, number=2) + suppressed_count = proto.Field(proto.INT32, number=2,) entries = proto.RepeatedField(proto.MESSAGE, number=1, message=log_entry.LogEntry,) - suppression_info = proto.RepeatedField( proto.MESSAGE, number=2, message=SuppressionInfo, ) diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 0d1f896e0..9b6280731 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -72,7 +69,6 @@ class LifecycleState(proto.Enum): class LogBucket(proto.Message): r"""Describes a repository of logs. - Attributes: name (str): The resource name of the bucket. For example: @@ -107,24 +103,17 @@ class LogBucket(proto.Message): Output only. The bucket lifecycle state. 
""" - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - retention_days = proto.Field(proto.INT32, number=11) - - locked = proto.Field(proto.BOOL, number=9) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + retention_days = proto.Field(proto.INT32, number=11,) + locked = proto.Field(proto.BOOL, number=9,) lifecycle_state = proto.Field(proto.ENUM, number=12, enum="LifecycleState",) class LogView(proto.Message): r"""Describes a view over logs in a bucket. - Attributes: name (str): The resource name of the view. @@ -148,15 +137,11 @@ class LogView(proto.Message): resource.type = "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=3) - - create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - filter = proto.Field(proto.STRING, number=7) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + filter = proto.Field(proto.STRING, number=7,) class LogSink(proto.Message): @@ -267,31 +252,24 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field(proto.STRING, number=1) - - destination = proto.Field(proto.STRING, number=3) - - filter = proto.Field(proto.STRING, number=5) - - description = proto.Field(proto.STRING, 
number=18) - - disabled = proto.Field(proto.BOOL, number=19) - + name = proto.Field(proto.STRING, number=1,) + destination = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=5,) + description = proto.Field(proto.STRING, number=18,) + disabled = proto.Field(proto.BOOL, number=19,) exclusions = proto.RepeatedField(proto.MESSAGE, number=16, message="LogExclusion",) - output_version_format = proto.Field(proto.ENUM, number=6, enum=VersionFormat,) - - writer_identity = proto.Field(proto.STRING, number=8) - - include_children = proto.Field(proto.BOOL, number=9) - + writer_identity = proto.Field(proto.STRING, number=8,) + include_children = proto.Field(proto.BOOL, number=9,) bigquery_options = proto.Field( proto.MESSAGE, number=12, oneof="options", message="BigQueryOptions", ) - - create_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,) + create_time = proto.Field( + proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, + ) class BigQueryOptions(proto.Message): @@ -319,14 +297,12 @@ class BigQueryOptions(proto.Message): will have this field set to false. """ - use_partitioned_tables = proto.Field(proto.BOOL, number=1) - - uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3) + use_partitioned_tables = proto.Field(proto.BOOL, number=1,) + uses_timestamp_column_partitioning = proto.Field(proto.BOOL, number=3,) class ListBucketsRequest(proto.Message): r"""The parameters to ``ListBuckets``. - Attributes: parent (str): Required. The parent resource whose buckets are to be @@ -355,16 +331,13 @@ class ListBucketsRequest(proto.Message): results might be available. 
""" - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. - Attributes: buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. @@ -380,13 +353,11 @@ def raw_page(self): return self buckets = proto.RepeatedField(proto.MESSAGE, number=1, message="LogBucket",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateBucketRequest(proto.Message): r"""The parameters to ``CreateBucket``. - Attributes: parent (str): Required. The resource in which to create the bucket: @@ -408,16 +379,13 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. """ - parent = proto.Field(proto.STRING, number=1) - - bucket_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + bucket_id = proto.Field(proto.STRING, number=2,) bucket = proto.Field(proto.MESSAGE, number=3, message="LogBucket",) class UpdateBucketRequest(proto.Message): r"""The parameters to ``UpdateBucket``. - Attributes: name (str): Required. The full resource name of the bucket to update. @@ -448,16 +416,15 @@ class UpdateBucketRequest(proto.Message): Example: ``updateMask=retention_days``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) bucket = proto.Field(proto.MESSAGE, number=2, message="LogBucket",) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class GetBucketRequest(proto.Message): r"""The parameters to ``GetBucket``. - Attributes: name (str): Required. 
The resource name of the bucket: @@ -473,12 +440,11 @@ class GetBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteBucketRequest(proto.Message): r"""The parameters to ``DeleteBucket``. - Attributes: name (str): Required. The full resource name of the bucket to delete. @@ -494,12 +460,11 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UndeleteBucketRequest(proto.Message): r"""The parameters to ``UndeleteBucket``. - Attributes: name (str): Required. The full resource name of the bucket to undelete. @@ -515,12 +480,11 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListViewsRequest(proto.Message): r"""The parameters to ``ListViews``. - Attributes: parent (str): Required. The bucket whose views are to be listed: @@ -541,16 +505,13 @@ class ListViewsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListViewsResponse(proto.Message): r"""The response from ListViews. - Attributes: views (Sequence[google.cloud.logging_v2.types.LogView]): A list of views. 
@@ -566,13 +527,11 @@ def raw_page(self): return self views = proto.RepeatedField(proto.MESSAGE, number=1, message="LogView",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class CreateViewRequest(proto.Message): r"""The parameters to ``CreateView``. - Attributes: parent (str): Required. The bucket in which to create the view @@ -589,16 +548,13 @@ class CreateViewRequest(proto.Message): Required. The new view. """ - parent = proto.Field(proto.STRING, number=1) - - view_id = proto.Field(proto.STRING, number=2) - + parent = proto.Field(proto.STRING, number=1,) + view_id = proto.Field(proto.STRING, number=2,) view = proto.Field(proto.MESSAGE, number=3, message="LogView",) class UpdateViewRequest(proto.Message): r"""The parameters to ``UpdateView``. - Attributes: name (str): Required. The full resource name of the view to update @@ -623,16 +579,15 @@ class UpdateViewRequest(proto.Message): Example: ``updateMask=filter``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) view = proto.Field(proto.MESSAGE, number=2, message="LogView",) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class GetViewRequest(proto.Message): r"""The parameters to ``GetView``. - Attributes: name (str): Required. The resource name of the policy: @@ -645,12 +600,11 @@ class GetViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class DeleteViewRequest(proto.Message): r"""The parameters to ``DeleteView``. - Attributes: name (str): Required. 
The full resource name of the view to delete: @@ -663,12 +617,11 @@ class DeleteViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ListSinksRequest(proto.Message): r"""The parameters to ``ListSinks``. - Attributes: parent (str): Required. The parent resource whose sinks are to be listed: @@ -692,16 +645,13 @@ class ListSinksRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. - Attributes: sinks (Sequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. @@ -717,13 +667,11 @@ def raw_page(self): return self sinks = proto.RepeatedField(proto.MESSAGE, number=1, message="LogSink",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetSinkRequest(proto.Message): r"""The parameters to ``GetSink``. - Attributes: sink_name (str): Required. The resource name of the sink: @@ -738,12 +686,11 @@ class GetSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field(proto.STRING, number=1) + sink_name = proto.Field(proto.STRING, number=1,) class CreateSinkRequest(proto.Message): r"""The parameters to ``CreateSink``. - Attributes: parent (str): Required. The resource in which to create the sink: @@ -777,16 +724,13 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - - unique_writer_identity = proto.Field(proto.BOOL, number=3) + unique_writer_identity = proto.Field(proto.BOOL, number=3,) class UpdateSinkRequest(proto.Message): r"""The parameters to ``UpdateSink``. - Attributes: sink_name (str): Required. The full resource name of the sink to update, @@ -837,18 +781,16 @@ class UpdateSinkRequest(proto.Message): Example: ``updateMask=filter``. """ - sink_name = proto.Field(proto.STRING, number=1) - + sink_name = proto.Field(proto.STRING, number=1,) sink = proto.Field(proto.MESSAGE, number=2, message="LogSink",) - - unique_writer_identity = proto.Field(proto.BOOL, number=3) - - update_mask = proto.Field(proto.MESSAGE, number=4, message=field_mask.FieldMask,) + unique_writer_identity = proto.Field(proto.BOOL, number=3,) + update_mask = proto.Field( + proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, + ) class DeleteSinkRequest(proto.Message): r"""The parameters to ``DeleteSink``. - Attributes: sink_name (str): Required. The full resource name of the sink to delete, @@ -864,7 +806,7 @@ class DeleteSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field(proto.STRING, number=1) + sink_name = proto.Field(proto.STRING, number=1,) class LogExclusion(proto.Message): @@ -913,22 +855,16 @@ class LogExclusion(proto.Message): exclusions. 
""" - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - filter = proto.Field(proto.STRING, number=3) - - disabled = proto.Field(proto.BOOL, number=4) - - create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + filter = proto.Field(proto.STRING, number=3,) + disabled = proto.Field(proto.BOOL, number=4,) + create_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,) class ListExclusionsRequest(proto.Message): r"""The parameters to ``ListExclusions``. - Attributes: parent (str): Required. The parent resource whose exclusions are to be @@ -953,16 +889,13 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. - Attributes: exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. @@ -978,13 +911,11 @@ def raw_page(self): return self exclusions = proto.RepeatedField(proto.MESSAGE, number=1, message="LogExclusion",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetExclusionRequest(proto.Message): r"""The parameters to ``GetExclusion``. - Attributes: name (str): Required. 
The resource name of an existing exclusion: @@ -1000,12 +931,11 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class CreateExclusionRequest(proto.Message): r"""The parameters to ``CreateExclusion``. - Attributes: parent (str): Required. The parent resource in which to create the @@ -1026,14 +956,12 @@ class CreateExclusionRequest(proto.Message): resource. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) class UpdateExclusionRequest(proto.Message): r"""The parameters to ``UpdateExclusion``. - Attributes: name (str): Required. The resource name of the exclusion to update: @@ -1063,16 +991,15 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) exclusion = proto.Field(proto.MESSAGE, number=2, message="LogExclusion",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class DeleteExclusionRequest(proto.Message): r"""The parameters to ``DeleteExclusion``. - Attributes: name (str): Required. The resource name of an existing exclusion to @@ -1089,7 +1016,7 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class GetCmekSettingsRequest(proto.Message): @@ -1118,7 +1045,7 @@ class GetCmekSettingsRequest(proto.Message): applies to all projects and folders in the GCP organization. 
""" - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateCmekSettingsRequest(proto.Message): @@ -1163,11 +1090,11 @@ class UpdateCmekSettingsRequest(proto.Message): Example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) cmek_settings = proto.Field(proto.MESSAGE, number=2, message="CmekSettings",) - - update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, + ) class CmekSettings(proto.Message): @@ -1232,11 +1159,9 @@ class CmekSettings(proto.Message): for more information. """ - name = proto.Field(proto.STRING, number=1) - - kms_key_name = proto.Field(proto.STRING, number=2) - - service_account_id = proto.Field(proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + kms_key_name = proto.Field(proto.STRING, number=2,) + service_account_id = proto.Field(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/logging_v2/types/logging_metrics.py b/google/cloud/logging_v2/types/logging_metrics.py index c2a8a6007..4b39650f2 100644 --- a/google/cloud/logging_v2/types/logging_metrics.py +++ b/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import proto # type: ignore - -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( @@ -170,34 +167,26 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field(proto.STRING, number=1) - - description = proto.Field(proto.STRING, number=2) - - filter = proto.Field(proto.STRING, number=3) - + name = proto.Field(proto.STRING, number=1,) + description = proto.Field(proto.STRING, number=2,) + filter = proto.Field(proto.STRING, number=3,) metric_descriptor = proto.Field( - proto.MESSAGE, number=5, message=ga_metric.MetricDescriptor, + proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) - - value_extractor = proto.Field(proto.STRING, number=6) - - label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7) - + value_extractor = proto.Field(proto.STRING, number=6,) + label_extractors = proto.MapField(proto.STRING, proto.STRING, number=7,) bucket_options = proto.Field( - proto.MESSAGE, number=8, message=distribution.Distribution.BucketOptions, + proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp,) + update_time = proto.Field( + proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, ) - - create_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) - - update_time = proto.Field(proto.MESSAGE, number=10, message=timestamp.Timestamp,) - version = proto.Field(proto.ENUM, number=4, enum=ApiVersion,) class ListLogMetricsRequest(proto.Message): r"""The parameters to ListLogMetrics. - Attributes: parent (str): Required. 
The name of the project containing the metrics: @@ -218,16 +207,13 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field(proto.STRING, number=1) - - page_token = proto.Field(proto.STRING, number=2) - - page_size = proto.Field(proto.INT32, number=3) + parent = proto.Field(proto.STRING, number=1,) + page_token = proto.Field(proto.STRING, number=2,) + page_size = proto.Field(proto.INT32, number=3,) class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. - Attributes: metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. @@ -243,13 +229,11 @@ def raw_page(self): return self metrics = proto.RepeatedField(proto.MESSAGE, number=1, message="LogMetric",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class GetLogMetricRequest(proto.Message): r"""The parameters to GetLogMetric. - Attributes: metric_name (str): Required. The resource name of the desired metric: @@ -259,12 +243,11 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1) + metric_name = proto.Field(proto.STRING, number=1,) class CreateLogMetricRequest(proto.Message): r"""The parameters to CreateLogMetric. - Attributes: parent (str): Required. The resource name of the project in which to @@ -280,14 +263,12 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) class UpdateLogMetricRequest(proto.Message): r"""The parameters to UpdateLogMetric. - Attributes: metric_name (str): Required. The resource name of the metric to update: @@ -304,14 +285,12 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. 
""" - metric_name = proto.Field(proto.STRING, number=1) - + metric_name = proto.Field(proto.STRING, number=1,) metric = proto.Field(proto.MESSAGE, number=2, message="LogMetric",) class DeleteLogMetricRequest(proto.Message): r"""The parameters to DeleteLogMetric. - Attributes: metric_name (str): Required. The resource name of the metric to delete: @@ -321,7 +300,7 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field(proto.STRING, number=1) + metric_name = proto.Field(proto.STRING, number=1,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 493d67e6f..9e49bd0d9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -198,7 +198,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -220,7 +220,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 956cdf4f9..5ff9e1db5 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -50,7 +50,10 @@ # to use your own Cloud project. 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. 
Don't put any # secrets here. These values will override predefined values. 'envs': {}, @@ -170,6 +173,9 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 489b0453d..effb2f98d 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.3.1 -google-cloud-bigquery==2.16.0 +google-cloud-logging==2.4.0 +google-cloud-bigquery==2.20.0 google-cloud-storage==1.38.0 -google-cloud-pubsub==2.4.2 +google-cloud-pubsub==2.5.0 diff --git a/setup.py b/setup.py index 36426fb60..1cdf5e4e2 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.4.0" +version = "2.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -30,8 +30,11 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "google-cloud-appengine-logging >= 0.1.0, < 1.0.0dev", + "google-cloud-audit-log >= 0.1.0, < 1.0.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", "proto-plus >= 1.11.0", + "packaging >= 14.3", ] extras = {} diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index ae89ab4a1..61bbd6ec6 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -8,3 +8,4 @@ google-api-core==1.22.2 google-cloud-core==1.4.1 proto-plus==1.11.0 +packaging==14.3 diff --git a/tests/__init__.py b/tests/__init__.py index e69de29bb..4de65971c 100644 --- a/tests/__init__.py +++ 
b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/environment b/tests/environment index 30d6a8083..a0af8d102 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit 30d6a80838a1cae6fb3945f41f3e1d90e815c0c9 +Subproject commit a0af8d102a3c711cdff0dd12e01c8bfd357b7a83 diff --git a/tests/system/test_system.py b/tests/system/test_system.py index cc6d03804..81de866ee 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -16,9 +16,11 @@ from datetime import timedelta from datetime import timezone import logging +import numbers import os import pytest import unittest +import uuid from google.api_core.exceptions import BadGateway from google.api_core.exceptions import Conflict @@ -36,6 +38,8 @@ from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource +from google.protobuf.struct_pb2 import Struct, Value, ListValue, NullValue + from test_utils.retry import RetryErrors from test_utils.retry import RetryResult from test_utils.system import unique_resource_id @@ -142,32 +146,119 @@ def tearDown(self): def _logger_name(prefix): return prefix + unique_resource_id("-") - def test_list_entry_with_unregistered(self): - from google.protobuf import any_pb2 + @staticmethod + def _to_value(data): + if data is None: + return Value(null_value=NullValue.NULL_VALUE) + elif isinstance(data, 
numbers.Number): + return Value(number_value=data) + elif isinstance(data, str): + return Value(string_value=data) + elif isinstance(data, bool): + return Value(bool_value=data) + elif isinstance(data, (list, tuple, set)): + return Value( + list_value=ListValue(values=(TestLogging._to_value(e) for e in data)) + ) + elif isinstance(data, dict): + return Value(struct_value=TestLogging._dict_to_struct(data)) + else: + raise TypeError("Unknown data type: %r" % type(data)) + + @staticmethod + def _dict_to_struct(data): + return Struct(fields={k: TestLogging._to_value(v) for k, v in data.items()}) + + def test_list_entry_with_auditlog(self): + """ + Test emitting and listing logs containing a google.cloud.audit.AuditLog proto message + """ from google.protobuf import descriptor_pool from google.cloud.logging_v2 import entries pool = descriptor_pool.Default() type_name = "google.cloud.audit.AuditLog" - # Make sure the descriptor is not known in the registry. - with self.assertRaises(KeyError): - pool.FindMessageTypeByName(type_name) - type_url = "type.googleapis.com/" + type_name - filter_ = self.TYPE_FILTER.format(type_url) + f" AND {_time_filter}" - entry_iter = iter(Config.CLIENT.list_entries(page_size=1, filter_=filter_)) + # Make sure the descriptor is known in the registry. 
+ # Raises KeyError if unknown + pool.FindMessageTypeByName(type_name) + + # create log + audit_dict = { + "@type": type_url, + "methodName": "test", + "requestMetadata": {"callerIp": "::1", "callerSuppliedUserAgent": "test"}, + "resourceName": "test", + "serviceName": "test", + "status": {"code": 0}, + } + audit_struct = self._dict_to_struct(audit_dict) + + logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") + logger.log_proto(audit_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() - retry = RetryErrors(TooManyRequests) - protobuf_entry = retry(lambda: next(entry_iter))() + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.payload_json["methodName"], audit_dict["methodName"] + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["methodName"], + audit_dict["methodName"], + ) + + def test_list_entry_with_requestlog(self): + """ + Test emitting and listing logs containing a google.appengine.logging.v1.RequestLog proto message + """ + from google.protobuf import descriptor_pool + from google.cloud.logging_v2 import entries + + pool = descriptor_pool.Default() + type_name = "google.appengine.logging.v1.RequestLog" + type_url = "type.googleapis.com/" + type_name + # Make sure the descriptor is known in the registry. 
+ # Raises KeyError if unknown + pool.FindMessageTypeByName(type_name) + + # create log + req_dict = { + "@type": type_url, + "ip": "0.0.0.0", + "appId": "test", + "versionId": "test", + "requestId": "12345", + "latency": "500.0s", + "method": "GET", + "status": 500, + "resource": "test", + "httpVersion": "HTTP/1.1", + } + req_struct = self._dict_to_struct(req_dict) + + logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") + logger.log_proto(req_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - if Config.CLIENT._use_grpc: - self.assertIsNone(protobuf_entry.payload_json) - self.assertIsInstance(protobuf_entry.payload_pb, any_pb2.Any) - self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) - else: - self.assertIsNone(protobuf_entry.payload_pb) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" @@ -288,7 +379,7 @@ def test_log_handler_async(self): cloud_logger = logging.getLogger(handler.name) cloud_logger.addHandler(handler) - cloud_logger.warn(LOG_MESSAGE) + cloud_logger.warning(LOG_MESSAGE) handler.flush() entries = _list_entries(logger) expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name} @@ -310,7 +401,7 @@ def test_log_handler_sync(self): LOGGER_NAME = "mylogger" cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) - cloud_logger.warn(LOG_MESSAGE) + cloud_logger.warning(LOG_MESSAGE) entries = _list_entries(logger) expected_payload = {"message": LOG_MESSAGE, "python_logger": LOGGER_NAME} @@ 
-342,7 +433,7 @@ def test_handlers_w_extras(self): "resource": Resource(type="cloudiot_device", labels={}), "labels": {"test-label": "manual"}, } - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -363,7 +454,7 @@ def test_log_root_handler(self): self.to_delete.append(logger) google.cloud.logging.handlers.handlers.setup_logging(handler) - logging.warn(LOG_MESSAGE) + logging.warning(LOG_MESSAGE) entries = _list_entries(logger) expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index df379f1e9..4de65971c 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,4 +1,5 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,3 +12,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 000000000..4de65971c --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/logging_v2/__init__.py b/tests/unit/gapic/logging_v2/__init__.py index 42ffdf2bc..4de65971c 100644 --- a/tests/unit/gapic/logging_v2/__init__.py +++ b/tests/unit/gapic/logging_v2/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index d6a2f3983..8be1ee06f 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ( ConfigServiceV2AsyncClient, @@ -38,10 +37,40 @@ from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from 
google.cloud.logging_v2.services.config_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import logging_config from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -92,7 +121,7 @@ def test__get_default_mtls_endpoint(): "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -109,7 +138,7 @@ def 
test_config_service_v2_client_from_service_account_info(client_class): "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient,] ) def test_config_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -162,7 +191,7 @@ def test_config_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -460,7 +489,7 @@ def test_list_buckets( transport: str = "grpc", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -473,19 +502,15 @@ def test_list_buckets( call.return_value = logging_config.ListBucketsResponse( next_page_token="next_page_token_value", ) - response = client.list_buckets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == "next_page_token_value" @@ -497,7 +522,7 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -505,7 +530,6 @@ def test_list_buckets_empty_call(): client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() @@ -514,7 +538,7 @@ async def test_list_buckets_async( transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -527,18 +551,15 @@ async def test_list_buckets_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListBucketsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -548,17 +569,17 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: call.return_value = logging_config.ListBucketsResponse() - client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -573,11 +594,14 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -585,7 +609,6 @@ async def test_list_buckets_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListBucketsResponse() ) - await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. @@ -599,13 +622,12 @@ async def test_list_buckets_field_headers_async(): def test_list_buckets_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListBucketsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_buckets(parent="parent_value",) @@ -614,12 +636,11 @@ def test_list_buckets_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_buckets_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -631,7 +652,9 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -649,13 +672,14 @@ async def test_list_buckets_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -666,7 +690,7 @@ async def test_list_buckets_flattened_error_async(): def test_list_buckets_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -704,7 +728,7 @@ def test_list_buckets_pager(): def test_list_buckets_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -734,7 +758,9 @@ def test_list_buckets_pages(): @pytest.mark.asyncio async def test_list_buckets_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -771,7 +797,9 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -807,7 +835,7 @@ def test_get_bucket( transport: str = "grpc", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -824,27 +852,19 @@ def test_get_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.get_bucket(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -856,7 +876,7 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -864,7 +884,6 @@ def test_get_bucket_empty_call(): client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() @@ -873,7 +892,7 @@ async def test_get_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -892,26 +911,19 @@ async def test_get_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -921,17 +933,17 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.get_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -946,11 +958,14 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -958,7 +973,6 @@ async def test_get_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -975,7 +989,7 @@ def test_create_bucket( transport: str = "grpc", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -992,27 +1006,19 @@ def test_create_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1024,7 +1030,7 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1032,7 +1038,6 @@ def test_create_bucket_empty_call(): client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() @@ -1041,7 +1046,7 @@ async def test_create_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1060,26 +1065,19 @@ async def test_create_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1089,17 +1087,17 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1114,11 +1112,14 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio async def test_create_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1126,7 +1127,6 @@ async def test_create_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1143,7 +1143,7 @@ def test_update_bucket( transport: str = "grpc", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1160,27 +1160,19 @@ def test_update_bucket( locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1192,7 +1184,7 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1200,7 +1192,6 @@ def test_update_bucket_empty_call(): client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() @@ -1209,7 +1200,7 @@ async def test_update_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1228,26 +1219,19 @@ async def test_update_bucket_async( lifecycle_state=logging_config.LifecycleState.ACTIVE, ) ) - response = await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @@ -1257,17 +1241,17 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_bucket), "__call__") as call: call.return_value = logging_config.LogBucket() - client.update_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1282,11 +1266,14 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio async def test_update_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1294,7 +1281,6 @@ async def test_update_bucket_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogBucket() ) - await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -1311,7 +1297,7 @@ def test_delete_bucket( transport: str = "grpc", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1322,13 +1308,11 @@ def test_delete_bucket( with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1343,7 +1327,7 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1351,7 +1335,6 @@ def test_delete_bucket_empty_call(): client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() @@ -1360,7 +1343,7 @@ async def test_delete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1371,13 +1354,11 @@ async def test_delete_bucket_async( with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1390,17 +1371,17 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = None - client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. 
@@ -1415,17 +1396,19 @@ def test_delete_bucket_field_headers(): @pytest.mark.asyncio async def test_delete_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1442,7 +1425,7 @@ def test_undelete_bucket( transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1453,13 +1436,11 @@ def test_undelete_bucket( with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1474,7 +1455,7 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1482,7 +1463,6 @@ def test_undelete_bucket_empty_call(): client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() @@ -1491,7 +1471,7 @@ async def test_undelete_bucket_async( transport: str = "grpc_asyncio", request_type=logging_config.UndeleteBucketRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1502,13 +1482,11 @@ async def test_undelete_bucket_async( with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() # Establish that the response is the type that we expect. @@ -1521,17 +1499,17 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = None - client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1546,17 +1524,19 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio async def test_undelete_bucket_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.undelete_bucket), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. @@ -1573,7 +1553,7 @@ def test_list_views( transport: str = "grpc", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1586,19 +1566,15 @@ def test_list_views( call.return_value = logging_config.ListViewsResponse( next_page_token="next_page_token_value", ) - response = client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == "next_page_token_value" @@ -1610,7 +1586,7 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1618,7 +1594,6 @@ def test_list_views_empty_call(): client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() @@ -1627,7 +1602,7 @@ async def test_list_views_async( transport: str = "grpc_asyncio", request_type=logging_config.ListViewsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1640,18 +1615,15 @@ async def test_list_views_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListViewsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_views(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListViewsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1661,17 +1633,17 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: call.return_value = logging_config.ListViewsResponse() - client.list_views(request) # Establish that the underlying gRPC stub method was called. @@ -1686,11 +1658,14 @@ def test_list_views_field_headers(): @pytest.mark.asyncio async def test_list_views_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1698,7 +1673,6 @@ async def test_list_views_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListViewsResponse() ) - await client.list_views(request) # Establish that the underlying gRPC stub method was called. 
@@ -1712,13 +1686,12 @@ async def test_list_views_field_headers_async(): def test_list_views_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_views(parent="parent_value",) @@ -1727,12 +1700,11 @@ def test_list_views_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_views_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1744,7 +1716,9 @@ def test_list_views_flattened_error(): @pytest.mark.asyncio async def test_list_views_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1762,13 +1736,14 @@ async def test_list_views_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_views_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1779,7 +1754,7 @@ async def test_list_views_flattened_error_async(): def test_list_views_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1817,7 +1792,7 @@ def test_list_views_pager(): def test_list_views_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1847,7 +1822,9 @@ def test_list_views_pages(): @pytest.mark.asyncio async def test_list_views_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1884,7 +1861,9 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1918,7 +1897,7 @@ async def test_list_views_async_pages(): def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1931,23 +1910,17 @@ def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRe call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -1959,7 +1932,7 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1967,7 +1940,6 @@ def test_get_view_empty_call(): client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() @@ -1976,7 +1948,7 @@ async def test_get_view_async( transport: str = "grpc_asyncio", request_type=logging_config.GetViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1993,22 +1965,17 @@ async def test_get_view_async( filter="filter_value", ) ) - response = await client.get_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2018,17 +1985,17 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_view), "__call__") as call: call.return_value = logging_config.LogView() - client.get_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2043,11 +2010,14 @@ def test_get_view_field_headers(): @pytest.mark.asyncio async def test_get_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2055,7 +2025,6 @@ async def test_get_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.get_view(request) # Establish that the underlying gRPC stub method was called. @@ -2072,7 +2041,7 @@ def test_create_view( transport: str = "grpc", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2085,23 +2054,17 @@ def test_create_view( call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2113,7 +2076,7 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2121,7 +2084,6 @@ def test_create_view_empty_call(): client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() @@ -2130,7 +2092,7 @@ async def test_create_view_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2147,22 +2109,17 @@ async def test_create_view_async( filter="filter_value", ) ) - response = await client.create_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2172,17 +2129,17 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_view), "__call__") as call: call.return_value = logging_config.LogView() - client.create_view(request) # Establish that the underlying gRPC stub method was called. @@ -2197,11 +2154,14 @@ def test_create_view_field_headers(): @pytest.mark.asyncio async def test_create_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2209,7 +2169,6 @@ async def test_create_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.create_view(request) # Establish that the underlying gRPC stub method was called. @@ -2226,7 +2185,7 @@ def test_update_view( transport: str = "grpc", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2239,23 +2198,17 @@ def test_update_view( call.return_value = logging_config.LogView( name="name_value", description="description_value", filter="filter_value", ) - response = client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2267,7 +2220,7 @@ def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2275,7 +2228,6 @@ def test_update_view_empty_call(): client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() @@ -2284,7 +2236,7 @@ async def test_update_view_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2301,22 +2253,17 @@ async def test_update_view_async( filter="filter_value", ) ) - response = await client.update_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogView) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" @@ -2326,17 +2273,17 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_view), "__call__") as call: call.return_value = logging_config.LogView() - client.update_view(request) # Establish that the underlying gRPC stub method was called. @@ -2351,11 +2298,14 @@ def test_update_view_field_headers(): @pytest.mark.asyncio async def test_update_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2363,7 +2313,6 @@ async def test_update_view_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogView() ) - await client.update_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2380,7 +2329,7 @@ def test_delete_view( transport: str = "grpc", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2391,13 +2340,11 @@ def test_delete_view( with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() # Establish that the response is the type that we expect. @@ -2412,7 +2359,7 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2420,7 +2367,6 @@ def test_delete_view_empty_call(): client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() @@ -2429,7 +2375,7 @@ async def test_delete_view_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteViewRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2440,13 +2386,11 @@ async def test_delete_view_async( with mock.patch.object(type(client.transport.delete_view), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() # Establish that the response is the type that we expect. @@ -2459,17 +2403,17 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = None - client.delete_view(request) # Establish that the underlying gRPC stub method was called. 
@@ -2484,17 +2428,19 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio async def test_delete_view_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_view), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view(request) # Establish that the underlying gRPC stub method was called. @@ -2511,7 +2457,7 @@ def test_list_sinks( transport: str = "grpc", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2524,19 +2470,15 @@ def test_list_sinks( call.return_value = logging_config.ListSinksResponse( next_page_token="next_page_token_value", ) - response = client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSinksPager) - assert response.next_page_token == "next_page_token_value" @@ -2548,7 +2490,7 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2556,7 +2498,6 @@ def test_list_sinks_empty_call(): client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() @@ -2565,7 +2506,7 @@ async def test_list_sinks_async( transport: str = "grpc_asyncio", request_type=logging_config.ListSinksRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2578,18 +2519,15 @@ async def test_list_sinks_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListSinksResponse(next_page_token="next_page_token_value",) ) - response = await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -2599,17 +2537,17 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: call.return_value = logging_config.ListSinksResponse() - client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -2624,11 +2562,14 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2636,7 +2577,6 @@ async def test_list_sinks_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListSinksResponse() ) - await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. @@ -2650,13 +2590,12 @@ async def test_list_sinks_field_headers_async(): def test_list_sinks_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_sinks(parent="parent_value",) @@ -2665,12 +2604,11 @@ def test_list_sinks_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_sinks_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2682,7 +2620,9 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2700,13 +2640,14 @@ async def test_list_sinks_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2717,7 +2658,7 @@ async def test_list_sinks_flattened_error_async(): def test_list_sinks_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2755,7 +2696,7 @@ def test_list_sinks_pager(): def test_list_sinks_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2785,7 +2726,9 @@ def test_list_sinks_pages(): @pytest.mark.asyncio async def test_list_sinks_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2822,7 +2765,9 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2856,7 +2801,7 @@ async def test_list_sinks_async_pages(): def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRequest): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2879,33 +2824,22 @@ def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRe use_partitioned_tables=True ), ) - response = client.get_sink(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -2917,7 +2851,7 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2925,7 +2859,6 @@ def test_get_sink_empty_call(): client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() @@ -2934,7 +2867,7 @@ async def test_get_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.GetSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2956,32 +2889,22 @@ async def test_get_sink_async( include_children=True, ) ) - response = await client.get_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -2991,17 +2914,17 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3016,11 +2939,14 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3028,7 +2954,6 @@ async def test_get_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.get_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3042,13 +2967,12 @@ async def test_get_sink_field_headers_async(): def test_get_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_sink(sink_name="sink_name_value",) @@ -3057,12 +2981,11 @@ def test_get_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" def test_get_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3074,7 +2997,9 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_sink), "__call__") as call: @@ -3092,13 +3017,14 @@ async def test_get_sink_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3112,7 +3038,7 @@ def test_create_sink( transport: str = "grpc", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3135,33 +3061,22 @@ def test_create_sink( use_partitioned_tables=True ), ) - response = client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3173,7 +3088,7 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3181,7 +3096,6 @@ def test_create_sink_empty_call(): client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() @@ -3190,7 +3104,7 @@ async def test_create_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3212,32 +3126,22 @@ async def test_create_sink_async( include_children=True, ) ) - response = await client.create_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3247,17 +3151,17 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3272,11 +3176,14 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3284,7 +3191,6 @@ async def test_create_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.create_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3298,13 +3204,12 @@ async def test_create_sink_field_headers_async(): def test_create_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_sink( @@ -3315,14 +3220,12 @@ def test_create_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") def test_create_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3336,7 +3239,9 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_sink), "__call__") as call: @@ -3356,15 +3261,15 @@ async def test_create_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3380,7 +3285,7 @@ def test_update_sink( transport: str = "grpc", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3403,33 +3308,22 @@ def test_update_sink( use_partitioned_tables=True ), ) - response = client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3441,7 +3335,7 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3449,7 +3343,6 @@ def test_update_sink_empty_call(): client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() @@ -3458,7 +3351,7 @@ async def test_update_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3480,32 +3373,22 @@ async def test_update_sink_async( include_children=True, ) ) - response = await client.update_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) - assert response.name == "name_value" - assert response.destination == "destination_value" - assert response.filter == "filter_value" - assert response.description == "description_value" - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == "writer_identity_value" - assert response.include_children is True @@ -3515,17 +3398,17 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: call.return_value = logging_config.LogSink() - client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3540,11 +3423,14 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3552,7 +3438,6 @@ async def test_update_sink_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogSink() ) - await client.update_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3566,35 +3451,31 @@ async def test_update_sink_field_headers_async(): def test_update_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_sink( sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3603,13 +3484,15 @@ def test_update_sink_flattened_error(): logging_config.UpdateSinkRequest(), sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_sink), "__call__") as call: @@ -3624,24 +3507,23 @@ async def test_update_sink_flattened_async(): response = await client.update_sink( sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3650,7 +3532,7 @@ async def test_update_sink_flattened_error_async(): logging_config.UpdateSinkRequest(), sink_name="sink_name_value", sink=logging_config.LogSink(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -3658,7 +3540,7 @@ def test_delete_sink( transport: str = "grpc", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3669,13 +3551,11 @@ def test_delete_sink( with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. @@ -3690,7 +3570,7 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3698,7 +3578,6 @@ def test_delete_sink_empty_call(): client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() @@ -3707,7 +3586,7 @@ async def test_delete_sink_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteSinkRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3718,13 +3597,11 @@ async def test_delete_sink_async( with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() # Establish that the response is the type that we expect. @@ -3737,17 +3614,17 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = None - client.delete_sink(request) # Establish that the underlying gRPC stub method was called. 
@@ -3762,17 +3639,19 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() + request.sink_name = "sink_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. @@ -3786,13 +3665,12 @@ async def test_delete_sink_field_headers_async(): def test_delete_sink_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_sink(sink_name="sink_name_value",) @@ -3801,12 +3679,11 @@ def test_delete_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" def test_delete_sink_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3818,7 +3695,9 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_sink), "__call__") as call: @@ -3834,13 +3713,14 @@ async def test_delete_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3854,7 +3734,7 @@ def test_list_exclusions( transport: str = "grpc", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3867,19 +3747,15 @@ def test_list_exclusions( call.return_value = logging_config.ListExclusionsResponse( next_page_token="next_page_token_value", ) - response = client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == "next_page_token_value" @@ -3891,7 +3767,7 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3899,7 +3775,6 @@ def test_list_exclusions_empty_call(): client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() @@ -3908,7 +3783,7 @@ async def test_list_exclusions_async( transport: str = "grpc_asyncio", request_type=logging_config.ListExclusionsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3923,18 +3798,15 @@ async def test_list_exclusions_async( next_page_token="next_page_token_value", ) ) - response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3944,17 +3816,17 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. @@ -3969,11 +3841,14 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3981,7 +3856,6 @@ async def test_list_exclusions_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.ListExclusionsResponse() ) - await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. 
@@ -3995,13 +3869,12 @@ async def test_list_exclusions_field_headers_async(): def test_list_exclusions_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_exclusions(parent="parent_value",) @@ -4010,12 +3883,11 @@ def test_list_exclusions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_exclusions_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4027,7 +3899,9 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4045,13 +3919,14 @@ async def test_list_exclusions_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4062,7 +3937,7 @@ async def test_list_exclusions_flattened_error_async(): def test_list_exclusions_pager(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4105,7 +3980,7 @@ def test_list_exclusions_pager(): def test_list_exclusions_pages(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4140,7 +4015,9 @@ def test_list_exclusions_pages(): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4182,7 +4059,9 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4223,7 +4102,7 @@ def test_get_exclusion( transport: str = "grpc", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4239,25 +4118,18 @@ def test_get_exclusion( filter="filter_value", disabled=True, ) - response = client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4269,7 +4141,7 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4277,7 +4149,6 @@ def test_get_exclusion_empty_call(): client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() @@ -4286,7 +4157,7 @@ async def test_get_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.GetExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4304,24 +4175,18 @@ async def test_get_exclusion_async( disabled=True, ) ) - response = await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4331,17 +4196,17 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. 
@@ -4356,11 +4221,14 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4368,7 +4236,6 @@ async def test_get_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4382,13 +4249,12 @@ async def test_get_exclusion_field_headers_async(): def test_get_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_exclusion(name="name_value",) @@ -4397,12 +4263,11 @@ def test_get_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4414,7 +4279,9 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_exclusion), "__call__") as call: @@ -4432,13 +4299,14 @@ async def test_get_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4452,7 +4320,7 @@ def test_create_exclusion( transport: str = "grpc", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4468,25 +4336,18 @@ def test_create_exclusion( filter="filter_value", disabled=True, ) - response = client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4498,7 +4359,7 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4506,7 +4367,6 @@ def test_create_exclusion_empty_call(): client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() @@ -4515,7 +4375,7 @@ async def test_create_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.CreateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4533,24 +4393,18 @@ async def test_create_exclusion_async( disabled=True, ) ) - response = await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4560,17 +4414,17 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4585,11 +4439,14 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4597,7 +4454,6 @@ async def test_create_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. 
@@ -4611,13 +4467,12 @@ async def test_create_exclusion_field_headers_async(): def test_create_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_exclusion( @@ -4629,14 +4484,12 @@ def test_create_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") def test_create_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4650,7 +4503,9 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_exclusion), "__call__") as call: @@ -4671,15 +4526,15 @@ async def test_create_exclusion_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4695,7 +4550,7 @@ def test_update_exclusion( transport: str = "grpc", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4711,25 +4566,18 @@ def test_update_exclusion( filter="filter_value", disabled=True, ) - response = client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4741,7 +4589,7 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4749,7 +4597,6 @@ def test_update_exclusion_empty_call(): client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() @@ -4758,7 +4605,7 @@ async def test_update_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.UpdateExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4776,24 +4623,18 @@ async def test_update_exclusion_async( disabled=True, ) ) - response = await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.disabled is True @@ -4803,17 +4644,17 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. 
@@ -4828,11 +4669,14 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -4840,7 +4684,6 @@ async def test_update_exclusion_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.LogExclusion() ) - await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -4854,35 +4697,31 @@ async def test_update_exclusion_field_headers_async(): def test_update_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_exclusion( name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4891,13 +4730,15 @@ def test_update_exclusion_flattened_error(): logging_config.UpdateExclusionRequest(), name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_exclusion), "__call__") as call: @@ -4912,24 +4753,23 @@ async def test_update_exclusion_flattened_async(): response = await client.update_exclusion( name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4938,7 +4778,7 @@ async def test_update_exclusion_flattened_error_async(): logging_config.UpdateExclusionRequest(), name="name_value", exclusion=logging_config.LogExclusion(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @@ -4946,7 +4786,7 @@ def test_delete_exclusion( transport: str = "grpc", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4957,13 +4797,11 @@ def test_delete_exclusion( with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. 
@@ -4978,7 +4816,7 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4986,7 +4824,6 @@ def test_delete_exclusion_empty_call(): client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() @@ -4995,7 +4832,7 @@ async def test_delete_exclusion_async( transport: str = "grpc_asyncio", request_type=logging_config.DeleteExclusionRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5006,13 +4843,11 @@ async def test_delete_exclusion_async( with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. @@ -5025,17 +4860,17 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = logging_config.DeleteExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = None - client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -5050,17 +4885,19 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. @@ -5074,13 +4911,12 @@ async def test_delete_exclusion_field_headers_async(): def test_delete_exclusion_flattened(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_exclusion(name="name_value",) @@ -5089,12 +4925,11 @@ def test_delete_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_exclusion_flattened_error(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5106,7 +4941,9 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_exclusion), "__call__") as call: @@ -5122,13 +4959,14 @@ async def test_delete_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -5142,7 +4980,7 @@ def test_get_cmek_settings( transport: str = "grpc", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5159,23 +4997,17 @@ def test_get_cmek_settings( kms_key_name="kms_key_name_value", service_account_id="service_account_id_value", ) - response = client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5187,7 +5019,7 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5197,7 +5029,6 @@ def test_get_cmek_settings_empty_call(): client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() @@ -5206,7 +5037,7 @@ async def test_get_cmek_settings_async( transport: str = "grpc_asyncio", request_type=logging_config.GetCmekSettingsRequest ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5225,22 +5056,17 @@ async def test_get_cmek_settings_async( service_account_id="service_account_id_value", ) ) - response = await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5250,11 +5076,12 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5262,7 +5089,6 @@ def test_get_cmek_settings_field_headers(): type(client.transport.get_cmek_settings), "__call__" ) as call: call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5277,11 +5103,14 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5291,7 +5120,6 @@ async def test_get_cmek_settings_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.CmekSettings() ) - await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5308,7 +5136,7 @@ def test_update_cmek_settings( transport: str = "grpc", request_type=logging_config.UpdateCmekSettingsRequest ): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5325,23 +5153,17 @@ def test_update_cmek_settings( kms_key_name="kms_key_name_value", service_account_id="service_account_id_value", ) - response = client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5353,7 +5175,7 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5363,7 +5185,6 @@ def test_update_cmek_settings_empty_call(): client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() @@ -5373,7 +5194,7 @@ async def test_update_cmek_settings_async( request_type=logging_config.UpdateCmekSettingsRequest, ): client = ConfigServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -5392,22 +5213,17 @@ async def test_update_cmek_settings_async( service_account_id="service_account_id_value", ) ) - response = await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.CmekSettings) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.service_account_id == "service_account_id_value" @@ -5417,11 +5233,12 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5429,7 +5246,6 @@ def test_update_cmek_settings_field_headers(): type(client.transport.update_cmek_settings), "__call__" ) as call: call.return_value = logging_config.CmekSettings() - client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. @@ -5444,11 +5260,14 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): - client = ConfigServiceV2AsyncClient(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -5458,7 +5277,6 @@ async def test_update_cmek_settings_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_config.CmekSettings() ) - await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
@@ -5474,16 +5292,16 @@ async def test_update_cmek_settings_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -5493,7 +5311,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -5504,7 +5322,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = ConfigServiceV2Client(transport=transport) assert client.transport is transport @@ -5513,13 +5331,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.ConfigServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -5534,23 +5352,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = ConfigServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.ConfigServiceV2GrpcTransport,) def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -5562,7 +5380,7 @@ def test_config_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -5597,15 +5415,42 @@ def 
test_config_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -5623,19 +5468,38 @@ def test_config_service_v2_base_transport_with_credentials_file(): def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are 
used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) ConfigServiceV2Client() adc.assert_called_once_with( scopes=( @@ -5648,14 +5512,46 @@ def test_config_service_v2_auth_adc(): ) -def test_config_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.ConfigServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -5667,6 +5563,125 @@ def test_config_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_config_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): 
+ # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_config_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -5675,7 +5690,7 @@ def test_config_service_v2_transport_auth_adc(): ], ) def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -5719,7 +5734,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls(transport_ def test_config_service_v2_host_no_port(): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -5729,7 +5744,7 @@ def test_config_service_v2_host_no_port(): def test_config_service_v2_host_with_port(): client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -5785,9 +5800,9 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = 
ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -5873,7 +5888,6 @@ def test_config_service_v2_transport_channel_mtls_with_adc(transport_class): def test_cmek_settings_path(): project = "squid" - expected = "projects/{project}/cmekSettings".format(project=project,) actual = ConfigServiceV2Client.cmek_settings_path(project) assert expected == actual @@ -5894,7 +5908,6 @@ def test_log_bucket_path(): project = "whelk" location = "octopus" bucket = "oyster" - expected = "projects/{project}/locations/{location}/buckets/{bucket}".format( project=project, location=location, bucket=bucket, ) @@ -5918,7 +5931,6 @@ def test_parse_log_bucket_path(): def test_log_exclusion_path(): project = "winkle" exclusion = "nautilus" - expected = "projects/{project}/exclusions/{exclusion}".format( project=project, exclusion=exclusion, ) @@ -5941,7 +5953,6 @@ def test_parse_log_exclusion_path(): def test_log_sink_path(): project = "squid" sink = "clam" - expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink,) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -5964,7 +5975,6 @@ def test_log_view_path(): location = "nudibranch" bucket = "cuttlefish" view = "mussel" - expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format( project=project, location=location, bucket=bucket, view=view, ) @@ -5988,7 +5998,6 @@ def test_parse_log_view_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -6009,7 +6018,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = 
ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -6028,7 +6036,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -6047,7 +6054,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -6067,7 +6073,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -6094,7 +6099,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.ConfigServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = ConfigServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -6103,6 +6108,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 66f22621c..5de01cf21 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 
2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,14 +23,14 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth -from google.api import monitored_resource_pb2 as monitored_resource # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import ( LoggingServiceV2AsyncClient, @@ -39,15 +38,45 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.logging_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging -from google.logging.type import http_request_pb2 as http_request # type: ignore -from google.logging.type import log_severity_pb2 as log_severity # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore from google.oauth2 import service_account -from google.protobuf import any_pb2 as gp_any # type: ignore -from 
google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import struct_pb2 as struct # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -99,7 +128,7 @@ def test__get_default_mtls_endpoint(): "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -116,7 +145,7 @@ def test_logging_service_v2_client_from_service_account_info(client_class): "client_class", 
[LoggingServiceV2Client, LoggingServiceV2AsyncClient,] ) def test_logging_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -169,7 +198,7 @@ def test_logging_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(LoggingServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -465,7 +494,7 @@ def test_logging_service_v2_client_client_options_from_dict(): def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogRequest): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -476,13 +505,11 @@ def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogReque with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_log(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() # Establish that the response is the type that we expect. @@ -497,7 +524,7 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -505,7 +532,6 @@ def test_delete_log_empty_call(): client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() @@ -514,7 +540,7 @@ async def test_delete_log_async( transport: str = "grpc_asyncio", request_type=logging.DeleteLogRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -525,13 +551,11 @@ async def test_delete_log_async( with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() # Establish that the response is the type that we expect. @@ -544,17 +568,17 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() + request.log_name = "log_name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = None - client.delete_log(request) # Establish that the underlying gRPC stub method was called. @@ -570,18 +594,18 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() + request.log_name = "log_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log(request) # Establish that the underlying gRPC stub method was called. @@ -595,13 +619,12 @@ async def test_delete_log_field_headers_async(): def test_delete_log_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_log), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log(log_name="log_name_value",) @@ -610,12 +633,11 @@ def test_delete_log_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" def test_delete_log_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -628,7 +650,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -645,14 +667,13 @@ async def test_delete_log_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -667,7 +688,7 @@ def test_write_log_entries( transport: str = "grpc", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -680,17 +701,14 @@ def test_write_log_entries( ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - response = client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging.WriteLogEntriesResponse) @@ -702,7 +720,7 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -712,7 +730,6 @@ def test_write_log_entries_empty_call(): client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() @@ -721,7 +738,7 @@ async def test_write_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.WriteLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -736,13 +753,11 @@ async def test_write_log_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.WriteLogEntriesResponse() ) - response = await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() # Establish that the response is the type that we expect. 
@@ -755,7 +770,7 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -763,12 +778,11 @@ def test_write_log_entries_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging.WriteLogEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.write_log_entries( log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -777,20 +791,16 @@ def test_write_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - - assert args[0].resource == monitored_resource.MonitoredResource( + assert args[0].resource == monitored_resource_pb2.MonitoredResource( type="type__value" ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] def test_write_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -798,7 +808,7 @@ def test_write_log_entries_flattened_error(): client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -807,7 +817,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -824,7 +834,7 @@ async def test_write_log_entries_flattened_async(): # using the keyword arguments to the method. response = await client.write_log_entries( log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -833,22 +843,18 @@ async def test_write_log_entries_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - - assert args[0].resource == monitored_resource.MonitoredResource( + assert args[0].resource == monitored_resource_pb2.MonitoredResource( type="type__value" ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -857,7 +863,7 @@ async def test_write_log_entries_flattened_error_async(): await client.write_log_entries( logging.WriteLogEntriesRequest(), log_name="log_name_value", - resource=monitored_resource.MonitoredResource(type="type__value"), + resource=monitored_resource_pb2.MonitoredResource(type="type__value"), labels={"key_value": "value_value"}, entries=[log_entry.LogEntry(log_name="log_name_value")], ) @@ -867,7 +873,7 @@ def test_list_log_entries( transport: str = "grpc", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -880,19 +886,15 @@ def test_list_log_entries( call.return_value = logging.ListLogEntriesResponse( next_page_token="next_page_token_value", ) - response = client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == "next_page_token_value" @@ -904,7 +906,7 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -912,7 +914,6 @@ def test_list_log_entries_empty_call(): client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() @@ -921,7 +922,7 @@ async def test_list_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.ListLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -934,18 +935,15 @@ async def test_list_log_entries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogEntriesResponse(next_page_token="next_page_token_value",) ) - response = await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogEntriesAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -955,13 +953,12 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogEntriesResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_entries( @@ -974,16 +971,13 @@ def test_list_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" def test_list_log_entries_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -999,7 +993,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1022,18 +1016,15 @@ async def test_list_log_entries_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1048,7 +1039,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1083,7 +1074,7 @@ def test_list_log_entries_pager(): def test_list_log_entries_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1113,7 +1104,9 @@ def test_list_log_entries_pages(): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1150,7 +1143,9 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1187,7 +1182,7 @@ def test_list_monitored_resource_descriptors( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1202,19 +1197,15 @@ def test_list_monitored_resource_descriptors( call.return_value = logging.ListMonitoredResourceDescriptorsResponse( next_page_token="next_page_token_value", ) - response = client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == "next_page_token_value" @@ -1226,7 +1217,7 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1236,7 +1227,6 @@ def test_list_monitored_resource_descriptors_empty_call(): client.list_monitored_resource_descriptors() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() @@ -1246,7 +1236,7 @@ async def test_list_monitored_resource_descriptors_async( request_type=logging.ListMonitoredResourceDescriptorsRequest, ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1263,18 +1253,15 @@ async def test_list_monitored_resource_descriptors_async( next_page_token="next_page_token_value", ) ) - response = await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1284,7 +1271,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1294,9 +1281,9 @@ def test_list_monitored_resource_descriptors_pager(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1305,14 +1292,14 @@ def test_list_monitored_resource_descriptors_pager(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1326,13 +1313,13 @@ def test_list_monitored_resource_descriptors_pager(): results = [i for i in pager] assert len(results) == 6 assert all( - isinstance(i, monitored_resource.MonitoredResourceDescriptor) + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in results ) def test_list_monitored_resource_descriptors_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1342,9 +1329,9 @@ def test_list_monitored_resource_descriptors_pages(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1353,14 +1340,14 @@ def test_list_monitored_resource_descriptors_pages(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1372,7 +1359,9 @@ def test_list_monitored_resource_descriptors_pages(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1384,9 +1373,9 @@ async def test_list_monitored_resource_descriptors_async_pager(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1395,14 +1384,14 @@ async def test_list_monitored_resource_descriptors_async_pager(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1415,14 +1404,16 @@ async def test_list_monitored_resource_descriptors_async_pager(): assert len(responses) == 6 assert all( - isinstance(i, monitored_resource.MonitoredResourceDescriptor) + isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in responses ) @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1434,9 +1425,9 @@ async def test_list_monitored_resource_descriptors_async_pages(): call.side_effect = ( logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="abc", ), @@ -1445,14 +1436,14 @@ async def test_list_monitored_resource_descriptors_async_pages(): ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], next_page_token="ghi", ), logging.ListMonitoredResourceDescriptorsResponse( resource_descriptors=[ - monitored_resource.MonitoredResourceDescriptor(), - monitored_resource.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), ], ), RuntimeError, @@ -1468,7 +1459,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1481,21 +1472,16 @@ def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest call.return_value = logging.ListLogsResponse( log_names=["log_names_value"], next_page_token="next_page_token_value", ) - response = client.list_logs(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ["log_names_value"] - assert response.next_page_token == "next_page_token_value" @@ -1507,7 +1493,7 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1515,7 +1501,6 @@ def test_list_logs_empty_call(): client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() @@ -1524,7 +1509,7 @@ async def test_list_logs_async( transport: str = "grpc_asyncio", request_type=logging.ListLogsRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1539,20 +1524,16 @@ async def test_list_logs_async( log_names=["log_names_value"], next_page_token="next_page_token_value", ) ) - response = await client.list_logs(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogsAsyncPager) - assert response.log_names == ["log_names_value"] - assert response.next_page_token == "next_page_token_value" @@ -1562,17 +1543,17 @@ async def test_list_logs_async_from_dict(): def test_list_logs_field_headers(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: call.return_value = logging.ListLogsResponse() - client.list_logs(request) # Establish that the underlying gRPC stub method was called. @@ -1588,12 +1569,13 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1601,7 +1583,6 @@ async def test_list_logs_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging.ListLogsResponse() ) - await client.list_logs(request) # Establish that the underlying gRPC stub method was called. 
@@ -1615,13 +1596,12 @@ async def test_list_logs_field_headers_async(): def test_list_logs_flattened(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging.ListLogsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_logs(parent="parent_value",) @@ -1630,12 +1610,11 @@ def test_list_logs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_logs_flattened_error(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1648,7 +1627,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1667,14 +1646,13 @@ async def test_list_logs_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1686,7 +1664,7 @@ async def test_list_logs_flattened_error_async(): def test_list_logs_pager(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1715,7 +1693,7 @@ def test_list_logs_pager(): def test_list_logs_pages(): - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1736,7 +1714,9 @@ def test_list_logs_pages(): @pytest.mark.asyncio async def test_list_logs_async_pager(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1764,7 +1744,9 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): - client = LoggingServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1791,26 +1773,23 @@ def test_tail_log_entries( transport: str = "grpc", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.tail_log_entries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([logging.TailLogEntriesResponse()]) - response = client.tail_log_entries(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -1827,13 +1806,12 @@ async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest ): client = LoggingServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. @@ -1843,13 +1821,11 @@ async def test_tail_log_entries_async( call.return_value.read = mock.AsyncMock( side_effect=[logging.TailLogEntriesResponse()] ) - response = await client.tail_log_entries(iter(requests)) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -1865,16 +1841,16 @@ async def test_tail_log_entries_async_from_dict(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -1884,7 +1860,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -1895,7 +1871,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = LoggingServiceV2Client(transport=transport) assert client.transport is transport @@ -1904,13 +1880,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1925,23 +1901,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = LoggingServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.LoggingServiceV2GrpcTransport,) def test_logging_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.LoggingServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1953,7 +1929,7 @@ def test_logging_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.LoggingServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1971,15 +1947,43 @@ def 
test_logging_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1998,19 +2002,39 @@ def test_logging_service_v2_base_transport_with_credentials_file(): def 
test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) LoggingServiceV2Client() adc.assert_called_once_with( scopes=( @@ -2024,14 +2048,47 @@ def test_logging_service_v2_auth_adc(): ) -def test_logging_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.LoggingServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -2044,6 +2101,127 @@ def test_logging_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def 
test_logging_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_logging_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -2052,7 +2230,7 @@ def test_logging_service_v2_transport_auth_adc(): ], ) def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2097,7 +2275,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls(transport def test_logging_service_v2_host_no_port(): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -2107,7 +2285,7 @@ def test_logging_service_v2_host_no_port(): def test_logging_service_v2_host_with_port(): client = LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -2163,9 +2341,9 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred 
= ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2254,7 +2432,6 @@ def test_logging_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_path(): project = "squid" log = "clam" - expected = "projects/{project}/logs/{log}".format(project=project, log=log,) actual = LoggingServiceV2Client.log_path(project, log) assert expected == actual @@ -2274,7 +2451,6 @@ def test_parse_log_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2295,7 +2471,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = LoggingServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2314,7 +2489,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = LoggingServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2333,7 +2507,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = LoggingServiceV2Client.common_project_path(project) assert expected == actual @@ -2353,7 +2526,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2380,7 +2552,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.LoggingServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = 
LoggingServiceV2Client( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2389,6 +2561,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 6faec201e..a8a420a28 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,18 +23,17 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth -from google.api import distribution_pb2 as distribution # type: ignore -from google.api import label_pb2 as label # type: ignore -from google.api import launch_stage_pb2 as launch_stage # type: ignore -from google.api import metric_pb2 as ga_metric # type: ignore -from google.api import metric_pb2 as metric # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import ( MetricsServiceV2AsyncClient, @@ -43,10 +41,40 @@ from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports +from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.logging_v2.services.metrics_service_v2.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.logging_v2.types import logging_metrics from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from 
google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -98,7 +126,7 @@ def test__get_default_mtls_endpoint(): "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] ) def test_metrics_service_v2_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -115,7 +143,7 @@ def test_metrics_service_v2_client_from_service_account_info(client_class): "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient,] ) def test_metrics_service_v2_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ 
-168,7 +196,7 @@ def test_metrics_service_v2_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(MetricsServiceV2Client, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -466,7 +494,7 @@ def test_list_log_metrics( transport: str = "grpc", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -479,19 +507,15 @@ def test_list_log_metrics( call.return_value = logging_metrics.ListLogMetricsResponse( next_page_token="next_page_token_value", ) - response = client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == "next_page_token_value" @@ -503,7 +527,7 @@ def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -511,7 +535,6 @@ def test_list_log_metrics_empty_call(): client.list_log_metrics() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() @@ -520,7 +543,7 @@ async def test_list_log_metrics_async( transport: str = "grpc_asyncio", request_type=logging_metrics.ListLogMetricsRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -535,18 +558,15 @@ async def test_list_log_metrics_async( next_page_token="next_page_token_value", ) ) - response = await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -556,17 +576,17 @@ async def test_list_log_metrics_async_from_dict(): def test_list_log_metrics_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: call.return_value = logging_metrics.ListLogMetricsResponse() - client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. 
@@ -582,12 +602,13 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -595,7 +616,6 @@ async def test_list_log_metrics_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.ListLogMetricsResponse() ) - await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. @@ -609,13 +629,12 @@ async def test_list_log_metrics_field_headers_async(): def test_list_log_metrics_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_log_metrics(parent="parent_value",) @@ -624,12 +643,11 @@ def test_list_log_metrics_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_log_metrics_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -642,7 +660,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -661,14 +679,13 @@ async def test_list_log_metrics_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -680,7 +697,7 @@ async def test_list_log_metrics_flattened_error_async(): def test_list_log_metrics_pager(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -718,7 +735,7 @@ def test_list_log_metrics_pager(): def test_list_log_metrics_pages(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -748,7 +765,9 @@ def test_list_log_metrics_pages(): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): - client = MetricsServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -785,7 +804,9 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): - client = MetricsServiceV2AsyncClient(credentials=credentials.AnonymousCredentials,) + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -821,7 +842,7 @@ def test_get_log_metric( transport: str = "grpc", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -838,27 +859,19 @@ def test_get_log_metric( value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) - response = client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -870,7 +883,7 @@ def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -878,7 +891,6 @@ def test_get_log_metric_empty_call(): client.get_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() @@ -887,7 +899,7 @@ async def test_get_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.GetLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -906,26 +918,19 @@ async def test_get_log_metric_async( version=logging_metrics.LogMetric.ApiVersion.V1, ) ) - response = await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -935,17 +940,17 @@ async def test_get_log_metric_async_from_dict(): def test_get_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: call.return_value = logging_metrics.LogMetric() - client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -961,12 +966,13 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -974,7 +980,6 @@ async def test_get_log_metric_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.LogMetric() ) - await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. 
@@ -988,13 +993,12 @@ async def test_get_log_metric_field_headers_async(): def test_get_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_log_metric), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_log_metric(metric_name="metric_name_value",) @@ -1003,12 +1007,11 @@ def test_get_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" def test_get_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1021,7 +1024,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1040,14 +1043,13 @@ async def test_get_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1062,7 +1064,7 @@ def test_create_log_metric( transport: str = "grpc", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1081,27 +1083,19 @@ def test_create_log_metric( value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) - response = client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1113,7 +1107,7 @@ def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1123,7 +1117,6 @@ def test_create_log_metric_empty_call(): client.create_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() @@ -1132,7 +1125,7 @@ async def test_create_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.CreateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1153,26 +1146,19 @@ async def test_create_log_metric_async( version=logging_metrics.LogMetric.ApiVersion.V1, ) ) - response = await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1182,11 +1168,12 @@ async def test_create_log_metric_async_from_dict(): def test_create_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1194,7 +1181,6 @@ def test_create_log_metric_field_headers(): type(client.transport.create_log_metric), "__call__" ) as call: call.return_value = logging_metrics.LogMetric() - client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1210,12 +1196,13 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1225,7 +1212,6 @@ async def test_create_log_metric_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.LogMetric() ) - await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1239,7 +1225,7 @@ async def test_create_log_metric_field_headers_async(): def test_create_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1247,7 +1233,6 @@ def test_create_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_log_metric( @@ -1258,14 +1243,12 @@ def test_create_log_metric_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") def test_create_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1280,7 +1263,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1303,16 +1286,14 @@ async def test_create_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1329,7 +1310,7 @@ def test_update_log_metric( transport: str = "grpc", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1348,27 +1329,19 @@ def test_update_log_metric( value_extractor="value_extractor_value", version=logging_metrics.LogMetric.ApiVersion.V1, ) - response = client.update_log_metric(request) # Establish that the 
underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1380,7 +1353,7 @@ def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1390,7 +1363,6 @@ def test_update_log_metric_empty_call(): client.update_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() @@ -1399,7 +1371,7 @@ async def test_update_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.UpdateLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1420,26 +1392,19 @@ async def test_update_log_metric_async( version=logging_metrics.LogMetric.ApiVersion.V1, ) ) - response = await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) - assert response.name == "name_value" - assert response.description == "description_value" - assert response.filter == "filter_value" - assert response.value_extractor == "value_extractor_value" - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1449,11 +1414,12 @@ async def test_update_log_metric_async_from_dict(): def test_update_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1461,7 +1427,6 @@ def test_update_log_metric_field_headers(): type(client.transport.update_log_metric), "__call__" ) as call: call.return_value = logging_metrics.LogMetric() - client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1477,12 +1442,13 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1492,7 +1458,6 @@ async def test_update_log_metric_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( logging_metrics.LogMetric() ) - await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1506,7 +1471,7 @@ async def test_update_log_metric_field_headers_async(): def test_update_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1514,7 +1479,6 @@ def test_update_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_log_metric( @@ -1526,14 +1490,12 @@ def test_update_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") def test_update_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1548,7 +1510,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1572,16 +1534,14 @@ async def test_update_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1598,7 +1558,7 @@ def test_delete_log_metric( transport: str = "grpc", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1611,13 +1571,11 @@ def test_delete_log_metric( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() # Establish that the response is the type that we expect. @@ -1632,7 +1590,7 @@ def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1642,7 +1600,6 @@ def test_delete_log_metric_empty_call(): client.delete_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() @@ -1651,7 +1608,7 @@ async def test_delete_log_metric_async( transport: str = "grpc_asyncio", request_type=logging_metrics.DeleteLogMetricRequest ): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1664,13 +1621,11 @@ async def test_delete_log_metric_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() # Establish that the response is the type that we expect. @@ -1683,11 +1638,12 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1695,7 +1651,6 @@ def test_delete_log_metric_field_headers(): type(client.transport.delete_log_metric), "__call__" ) as call: call.return_value = None - client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. 
@@ -1711,12 +1666,13 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() + request.metric_name = "metric_name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1724,7 +1680,6 @@ async def test_delete_log_metric_field_headers_async(): type(client.transport.delete_log_metric), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. @@ -1738,7 +1693,7 @@ async def test_delete_log_metric_field_headers_async(): def test_delete_log_metric_flattened(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1746,7 +1701,6 @@ def test_delete_log_metric_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_log_metric(metric_name="metric_name_value",) @@ -1755,12 +1709,11 @@ def test_delete_log_metric_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" def test_delete_log_metric_flattened_error(): - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1773,7 +1726,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1792,14 +1745,13 @@ async def test_delete_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1813,16 +1765,16 @@ async def test_delete_log_metric_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -1832,7 +1784,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -1843,7 +1795,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = MetricsServiceV2Client(transport=transport) assert client.transport is transport @@ -1852,13 +1804,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1873,23 +1825,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = MetricsServiceV2Client(credentials=credentials.AnonymousCredentials(),) + client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.MetricsServiceV2GrpcTransport,) def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1901,7 +1853,7 @@ def test_metrics_service_v2_base_transport(): ) as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1918,15 +1870,43 @@ def test_metrics_service_v2_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = 
(credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1945,19 +1925,39 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) MetricsServiceV2Client() adc.assert_called_once_with( scopes=( @@ -1971,14 +1971,47 @@ def test_metrics_service_v2_auth_adc(): ) -def test_metrics_service_v2_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.MetricsServiceV2GrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -1991,6 +2024,127 @@ def test_metrics_service_v2_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def 
test_metrics_service_v2_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + "https://www.googleapis.com/auth/logging.admin", + "https://www.googleapis.com/auth/logging.read", + "https://www.googleapis.com/auth/logging.write", + ), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_metrics_service_v2_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1999,7 +2153,7 @@ def test_metrics_service_v2_transport_auth_adc(): ], ) def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2044,7 +2198,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls(transport def test_metrics_service_v2_host_no_port(): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com" ), @@ -2054,7 +2208,7 @@ def test_metrics_service_v2_host_no_port(): def test_metrics_service_v2_host_with_port(): client = MetricsServiceV2Client( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="logging.googleapis.com:8000" ), @@ -2110,9 +2264,9 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred 
= ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2201,7 +2355,6 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc(transport_class): def test_log_metric_path(): project = "squid" metric = "clam" - expected = "projects/{project}/metrics/{metric}".format( project=project, metric=metric, ) @@ -2223,7 +2376,6 @@ def test_parse_log_metric_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2244,7 +2396,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = MetricsServiceV2Client.common_folder_path(folder) assert expected == actual @@ -2263,7 +2414,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = MetricsServiceV2Client.common_organization_path(organization) assert expected == actual @@ -2282,7 +2432,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = MetricsServiceV2Client.common_project_path(project) assert expected == actual @@ -2302,7 +2451,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2329,7 +2477,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.MetricsServiceV2Transport, "_prep_wrapped_messages" ) as prep: client = MetricsServiceV2Client( - 
credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2338,6 +2486,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 4ba052121..b7fef1b9e 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -291,7 +291,9 @@ def test_emit(self): ) logname = "loggername" message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) handler.handle(record) self.assertEqual( handler.transport.send_called_with, @@ -315,7 +317,9 @@ def test_emit_manual_field_override(self): ) logname = "loggername" message = "hello world" - record = logging.LogRecord(logname, logging, None, None, message, None, None) + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) # set attributes manually expected_trace = "123" setattr(record, "trace", expected_trace) @@ -350,6 +354,53 @@ def test_emit_manual_field_override(self): ), ) + def test_emit_with_custom_formatter(self): + """ + Handler should respect custom formatters attached + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + logFormatter = logging.Formatter(fmt="%(name)s :: %(levelname)s :: %(message)s") + handler.setFormatter(logFormatter) + message = "test" + 
expected_result = "logname :: INFO :: test" + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ) + + def test_format_with_arguments(self): + """ + Handler should support format string arguments + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = "name: %s" + name_arg = "Daniel" + expected_result = "name: Daniel" + record = logging.LogRecord( + None, logging.INFO, None, None, message, name_arg, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ) + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 0536583a5..3d1c11ab0 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -109,7 +109,6 @@ def test_format_with_quotes(self): When logging a message containing quotes, escape chars should be added """ import logging - import json handler = self._make_one() message = '"test"' @@ -117,9 +116,60 @@ def test_format_with_quotes(self): record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) record.created = None handler.filter(record) - result = json.loads(handler.format(record)) - result["message"] = expected_result - self.assertEqual(result["message"], expected_result) + result = handler.format(record) + self.assertIn(expected_result, result) + + def test_format_with_line_break(self): + """ + When logging a message containing \n, it should be properly escaped + """ + import logging 
+ + handler = self._make_one() + message = "test\ntest" + expected_result = "test\\ntest" + record = logging.LogRecord(None, logging.INFO, None, None, message, None, None,) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + + def test_format_with_custom_formatter(self): + """ + Handler should respect custom formatters attached + """ + import logging + + handler = self._make_one() + logFormatter = logging.Formatter(fmt="%(name)s :: %(levelname)s :: %(message)s") + handler.setFormatter(logFormatter) + message = "test" + expected_result = "logname :: INFO :: test" + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + + def test_format_with_arguments(self): + """ + Handler should support format string arguments + """ + import logging + + handler = self._make_one() + message = "name: %s" + name_arg = "Daniel" + expected_result = "name: Daniel" + record = logging.LogRecord( + None, logging.INFO, None, None, message, name_arg, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) def test_format_with_request(self): import logging diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index ef90b8159..b8795b8ce 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -503,6 +503,20 @@ def test_to_api_repr_defaults(self): } self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_struct(self): + from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + LOG_NAME = "struct.log" + message = Struct(fields={"foo": Value(bool_value=True)}) + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "jsonPayload": 
message, + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_explicit(self): import datetime from google.cloud.logging import Resource