From 38106f398dab55e2266c2686305f90194aed9cc9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 10:52:05 -0400 Subject: [PATCH 01/19] chore: [autoapprove] Update `black` and `isort` to latest versions (#795) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 6 ++-- .pre-commit-config.yaml | 2 +- google/cloud/logging_v2/_http.py | 1 - google/cloud/logging_v2/handlers/handlers.py | 2 +- noxfile.py | 34 ++++++++++--------- tests/system/test_system.py | 1 - .../handlers/test__monitored_resources.py | 3 -- tests/unit/handlers/test_handlers.py | 4 +-- tests/unit/handlers/test_structured_log.py | 14 ++++---- tests/unit/handlers/transports/test_base.py | 1 - tests/unit/handlers/transports/test_sync.py | 1 - tests/unit/test__http.py | 5 --- tests/unit/test__instrumentation.py | 1 - tests/unit/test_client.py | 2 -- tests/unit/test_entries.py | 4 --- tests/unit/test_logger.py | 5 --- tests/unit/test_metric.py | 1 - tests/unit/test_sink.py | 1 - 19 files changed, 33 insertions(+), 59 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a9bdb1b7a..dd98abbde 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 96d593c8c..0332d3267 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 19409cbd3..6a8e16950 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py index 581dce35e..b90789353 100644 --- a/google/cloud/logging_v2/_http.py +++ b/google/cloud/logging_v2/_http.py @@ -26,7 +26,6 @@ class Connection(_http.JSONConnection): - DEFAULT_API_ENDPOINT = "https://logging.googleapis.com" def __init__(self, client, *, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 
28960ae71..ce5822fcd 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -70,7 +70,7 @@ def _infer_source_location(record): ("function", "funcName"), ] output = {} - for (gcp_name, std_lib_name) in name_map: + for gcp_name, std_lib_name in name_map: value = getattr(record, std_lib_name, None) if value is not None: output[gcp_name] = value diff --git a/noxfile.py b/noxfile.py index 7ebe500a3..565df040b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,22 +17,24 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -40,32 +42,32 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "flask", "webob", "django", ] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "google-cloud-bigquery", "google-cloud-pubsub", "google-cloud-storage", "google-cloud-testutils", ] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -78,6 +80,7 @@ "lint_setup_py", "blacken", "docs", + "format", ] # Error if a python version is missing @@ -196,7 +199,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
# See https://github.com/grpc/grpc/issues/32163 diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 8d39408d3..ba7fd6c2a 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -118,7 +118,6 @@ def setUpModule(): class TestLogging(unittest.TestCase): - JSON_PAYLOAD = { "message": "System test: test_log_struct", "weather": { diff --git a/tests/unit/handlers/test__monitored_resources.py b/tests/unit/handlers/test__monitored_resources.py index 16378fd50..838543253 100644 --- a/tests/unit/handlers/test__monitored_resources.py +++ b/tests/unit/handlers/test__monitored_resources.py @@ -45,7 +45,6 @@ class Test_Create_Resources(unittest.TestCase): - PROJECT = "test-project" LOCATION = "test-location" NAME = "test-name" @@ -135,7 +134,6 @@ def test_functions_resource_no_name(self): self.assertEqual(func_resource.labels["function_name"], "") def test_create_kubernetes_resource(self): - patch = mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", wraps=self._mock_metadata, @@ -246,7 +244,6 @@ def test_with_no_project_from_server(self): class Test_Resource_Detection(unittest.TestCase): - PROJECT = "test-project" def _mock_k8s_metadata(self, endpoint): diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 1e431f1aa..1f86a8e37 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -25,7 +25,6 @@ class TestCloudLoggingFilter(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod @@ -291,7 +290,6 @@ def test_user_overrides(self): class TestCloudLoggingHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod @@ -859,7 +857,7 @@ def test_json_fields_input_unmodified(self): _format_and_parse_message(record, handler) # ensure json_fields has no side-effects self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) - for (key, value) in json_fields_orig.items(): + for key, value in json_fields_orig.items(): self.assertEqual( value, json_fields[key], f"expected_payload[{key}] != result[{key}]" ) diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 353530ed1..fc6b7c598 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -86,7 +86,7 @@ def test_format(self): } handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) self.assertEqual( len(expected_payload.keys()), @@ -121,7 +121,7 @@ def test_format_minimal(self): handler.filter(record) result = json.loads(handler.format(record)) self.assertEqual(set(expected_payload.keys()), set(result.keys())) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" ) @@ -304,7 +304,7 @@ def test_format_with_reserved_json_field(self): handler.filter(record) result = json.loads(handler.format(record)) self.assertEqual(set(expected_payload.keys()), set(result.keys())) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" ) @@ -417,7 +417,7 @@ def test_format_with_request(self): ): handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, 
value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_with_traceparent(self): @@ -452,7 +452,7 @@ def test_format_with_traceparent(self): ): handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_overrides(self): @@ -509,7 +509,7 @@ def test_format_overrides(self): ) handler.filter(record) result = json.loads(handler.format(record)) - for (key, value) in expected_payload.items(): + for key, value in expected_payload.items(): self.assertEqual(value, result[key]) def test_format_with_json_fields(self): @@ -590,7 +590,7 @@ def test_json_fields_input_unmodified(self): handler.format(record) # ensure json_fields has no side-effects self.assertEqual(set(json_fields.keys()), set(json_fields_orig.keys())) - for (key, value) in json_fields_orig.items(): + for key, value in json_fields_orig.items(): self.assertEqual( value, json_fields[key], f"expected_payload[{key}] != result[{key}]" ) diff --git a/tests/unit/handlers/transports/test_base.py b/tests/unit/handlers/transports/test_base.py index 71ef1366a..a0013cadf 100644 --- a/tests/unit/handlers/transports/test_base.py +++ b/tests/unit/handlers/transports/test_base.py @@ -16,7 +16,6 @@ class TestBaseHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index 752a96d9f..01a949d24 100644 --- a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -17,7 +17,6 @@ class TestSyncHandler(unittest.TestCase): - PROJECT = "PROJECT" @staticmethod diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index f9b60cfa6..0e83bd82c 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -24,7 +24,6 @@ def _make_credentials(): class TestConnection(unittest.TestCase): - PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" @@ -96,7 +95,6 @@ def test_extra_headers(self): class Test_LoggingAPI(unittest.TestCase): - PROJECT = "project" PROJECT_PATH = "projects/project" LIST_ENTRIES_PATH = "entries:list" @@ -354,7 +352,6 @@ def test_logger_delete(self): class Test_SinksAPI(unittest.TestCase): - PROJECT = "project" PROJECT_PATH = "projects/project" FILTER = "logName:syslog AND severity>=ERROR" @@ -636,7 +633,6 @@ def test_sink_delete_hit(self): class Test_MetricsAPI(unittest.TestCase): - PROJECT = "project" FILTER = "logName:syslog AND severity>=ERROR" LIST_METRICS_PATH = "projects/%s/metrics" % (PROJECT,) @@ -865,7 +861,6 @@ def test_metric_delete_hit(self): class _Connection(object): - _called_with = None _raise_conflict = False diff --git a/tests/unit/test__instrumentation.py b/tests/unit/test__instrumentation.py index dc330b0ca..a98aae34c 100644 --- a/tests/unit/test__instrumentation.py +++ b/tests/unit/test__instrumentation.py @@ -17,7 +17,6 @@ class TestInstrumentation(unittest.TestCase): - TEST_NAME = "python" # LONG_NAME > 14 characters LONG_NAME = TEST_NAME + "789ABCDEF" diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 1c47a343b..ec3130ac5 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -34,7 +34,6 @@ def _make_credentials(): class TestClient(unittest.TestCase): - PROJECT = "PROJECT" PROJECT_PATH = f"projects/{PROJECT}" LOGGER_NAME = "LOGGER_NAME" @@ -903,7 +902,6 @@ def test_setup_logging_w_extra_kwargs(self): class 
_Connection(object): - _called_with = None def __init__(self, *responses): diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index 6f3af684f..4742f5574 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -79,7 +79,6 @@ def test_w_str(self): class TestLogEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -469,7 +468,6 @@ def test_to_api_repr_explicit(self): class TestTextEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -557,7 +555,6 @@ def test_to_api_repr_explicit(self): class TestStructEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" @@ -659,7 +656,6 @@ def test_to_api_repr_explicit(self): class TestProtobufEntry(unittest.TestCase): - PROJECT = "PROJECT" LOGGER_NAME = "LOGGER_NAME" diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 16c89959b..cdb56747d 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -28,7 +28,6 @@ def _make_credentials(): class TestLogger(unittest.TestCase): - PROJECT = "test-project" LOGGER_NAME = "logger-name" TIME_FORMAT = '"%Y-%m-%dT%H:%M:%S.%f%z"' @@ -1086,7 +1085,6 @@ def test_first_log_emits_instrumentation(self): class TestBatch(unittest.TestCase): - PROJECT = "test-project" @staticmethod @@ -1847,7 +1845,6 @@ def test_batch_error_gets_context(self): class _Logger(object): - labels = None def __init__(self, name="NAME", project="PROJECT"): @@ -1855,7 +1852,6 @@ def __init__(self, name="NAME", project="PROJECT"): class _DummyLoggingAPI(object): - _write_entries_called_with = None def write_entries( @@ -1909,7 +1905,6 @@ class _Bugout(Exception): class _Connection(object): - _called_with = None def __init__(self, *responses): diff --git a/tests/unit/test_metric.py b/tests/unit/test_metric.py index 83b49d02d..f36ae3b2a 100644 --- a/tests/unit/test_metric.py +++ b/tests/unit/test_metric.py @@ -16,7 +16,6 @@ class TestMetric(unittest.TestCase): - PROJECT = "test-project" METRIC_NAME = "metric-name" FULL_METRIC_NAME = f"projects/{PROJECT}/metrics/{METRIC_NAME}" diff --git a/tests/unit/test_sink.py b/tests/unit/test_sink.py index 1e4852ab5..b5005b057 100644 --- a/tests/unit/test_sink.py +++ b/tests/unit/test_sink.py @@ -16,7 +16,6 @@ class TestSink(unittest.TestCase): - PROJECT = "test-project" PROJECT_PATH = f"projects/{PROJECT}" SINK_NAME = "sink-name" From 7ae3d5968c61659c35c75774f8620c2f1c9f22ff Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 11 Oct 2023 00:16:07 +0200 Subject: [PATCH 02/19] chore(deps): update dependency google-cloud-logging to v3.8.0 (#793) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 618f93857..a8c7b9931 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==3.7.0 +google-cloud-logging==3.8.0 google-cloud-bigquery==3.12.0 google-cloud-storage==2.11.0 google-cloud-pubsub==2.18.4 From a00c261ee07a5dcaac9f5b966b4bb6729a2bbe65 Mon Sep 17 00:00:00 2001 From: gkevinzheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 18 Oct 2023 12:02:09 -0400 Subject: [PATCH 03/19] fix: Updated protobuf JSON formatting to support nested protobufs (#797) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Updated protobuf JSON formatting to support nested protobufs * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Cleaner way to differentiate between proto objects and dict objects in to_api_repr * Fixed unused import. * Fixed failing unit test. --------- Co-authored-by: Owl Bot --- google/cloud/logging_v2/entries.py | 12 ++++----- tests/system/test_system.py | 7 ++++++ tests/unit/test_entries.py | 39 ++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 6 deletions(-) diff --git a/google/cloud/logging_v2/entries.py b/google/cloud/logging_v2/entries.py index 9db020f67..d8a877738 100644 --- a/google/cloud/logging_v2/entries.py +++ b/google/cloud/logging_v2/entries.py @@ -18,9 +18,9 @@ import json import re -from google.protobuf.any_pb2 import Any from google.protobuf.json_format import MessageToDict from google.protobuf.json_format import Parse +from google.protobuf.message import Message from google.cloud.logging_v2.resource import Resource from google.cloud._helpers import _name_from_project_path @@ -325,7 +325,7 @@ def _extract_payload(cls, resource): @property def payload_pb(self): - if isinstance(self.payload, Any): + if isinstance(self.payload, Message): return self.payload @property @@ -337,10 +337,10 @@ def to_api_repr(self): """API repr (JSON format) for entry.""" info = super(ProtobufEntry, self).to_api_repr() proto_payload = None - if self.payload_json: - proto_payload = dict(self.payload_json) - elif self.payload_pb: - proto_payload = MessageToDict(self.payload_pb) + if self.payload_pb: + proto_payload = MessageToDict(self.payload) + elif self.payload_json: + proto_payload = dict(self.payload) info["protoPayload"] = proto_payload return info diff --git a/tests/system/test_system.py b/tests/system/test_system.py index ba7fd6c2a..0d39aa0a9 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -192,6 +192,7 @@ def test_list_entry_with_auditlog(self): "methodName": "test", "resourceName": "test", "serviceName": "test", + "requestMetadata": {"callerIp": "127.0.0.1"}, } audit_struct = self._dict_to_struct(audit_dict) @@ -223,6 +224,12 @@ def test_list_entry_with_auditlog(self): protobuf_entry.to_api_repr()["protoPayload"]["methodName"], audit_dict["methodName"], ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["requestMetadata"][ + "callerIp" + ], + audit_dict["requestMetadata"]["callerIp"], + ) def test_list_entry_with_requestlog(self): """ diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index 4742f5574..e7bf30d87 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -739,6 +739,45 @@ def test_to_api_repr_proto_defaults(self): } self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_proto_inner_struct_field(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = "test.log" + inner_struct = Struct(fields={"foo": Value(string_value="bar")}) + message = Struct(fields={"inner": Value(struct_value=inner_struct)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + + def test_to_api_repr_proto_inner_list_field(self): + from google.protobuf.json_format import MessageToDict + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + 
from google.protobuf.struct_pb2 import ListValue + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + LOG_NAME = "test.log" + lines = ListValue( + values=[Value(string_value="line1"), Value(string_value="line2")] + ) + message = Struct(fields={"lines": Value(list_value=lines)}) + + entry = self._make_one(log_name=LOG_NAME, payload=message) + expected = { + "logName": LOG_NAME, + "protoPayload": MessageToDict(message), + "resource": _GLOBAL_RESOURCE._to_dict(), + } + self.assertEqual(entry.to_api_repr(), expected) + def test_to_api_repr_proto_explicit(self): import datetime from google.protobuf.json_format import MessageToDict From 3ef685dc644df86255d7a4d9e710c755f12ec092 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 14:10:34 -0700 Subject: [PATCH 04/19] chore: rename rst files to avoid conflict with service names (#800) Source-Link: https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index dd98abbde..7f291dbd5 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 0332d3267..16170d0ca 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine From fe8111af6482c810e1fe850548080bd3ccf283a3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Nov 2023 17:17:40 +0100 Subject: [PATCH 05/19] chore(deps): update all dependencies (#796) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 1779d47d2..908e344b5 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,2 +1,2 @@ backoff==2.2.1 -pytest==7.4.2 +pytest==7.4.3 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index a8c7b9931..0b0ff4e67 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==3.8.0 
-google-cloud-bigquery==3.12.0 -google-cloud-storage==2.11.0 +google-cloud-bigquery==3.13.0 +google-cloud-storage==2.13.0 google-cloud-pubsub==2.18.4 From b14bb144fad2dcf067b7e62e402b708f45ebadbe Mon Sep 17 00:00:00 2001 From: gkevinzheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 1 Nov 2023 14:07:12 -0400 Subject: [PATCH 06/19] fix: Fixed object paths in autogenerated code in owlbot.py (#804) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Fixed object paths in autogenerated code in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- owlbot.py | 105 ++++++++++++++++++ ...onfig_service_v2_copy_log_entries_async.py | 4 +- ...config_service_v2_copy_log_entries_sync.py | 4 +- ...d_config_service_v2_create_bucket_async.py | 4 +- ...ig_service_v2_create_bucket_async_async.py | 4 +- ...fig_service_v2_create_bucket_async_sync.py | 4 +- ...ed_config_service_v2_create_bucket_sync.py | 4 +- ...onfig_service_v2_create_exclusion_async.py | 6 +- ...config_service_v2_create_exclusion_sync.py | 6 +- ...ted_config_service_v2_create_link_async.py | 4 +- ...ated_config_service_v2_create_link_sync.py | 4 +- ...ted_config_service_v2_create_sink_async.py | 6 +- ...ated_config_service_v2_create_sink_sync.py | 6 +- ...ted_config_service_v2_create_view_async.py | 4 +- ...ated_config_service_v2_create_view_sync.py | 4 +- ...d_config_service_v2_delete_bucket_async.py | 4 +- ...ed_config_service_v2_delete_bucket_sync.py | 4 +- ...onfig_service_v2_delete_exclusion_async.py | 4 +- ...config_service_v2_delete_exclusion_sync.py | 4 +- ...ted_config_service_v2_delete_link_async.py | 4 +- ...ated_config_service_v2_delete_link_sync.py | 4 +- ...ted_config_service_v2_delete_sink_async.py | 4 +- ...ated_config_service_v2_delete_sink_sync.py | 4 +- ...ted_config_service_v2_delete_view_async.py | 4 +- ...ated_config_service_v2_delete_view_sync.py | 4 +- ...ated_config_service_v2_get_bucket_async.py | 4 +- ...rated_config_service_v2_get_bucket_sync.py | 4 +- ...nfig_service_v2_get_cmek_settings_async.py | 4 +- ...onfig_service_v2_get_cmek_settings_sync.py | 4 +- ...d_config_service_v2_get_exclusion_async.py | 4 +- ...ed_config_service_v2_get_exclusion_sync.py | 4 +- ...erated_config_service_v2_get_link_async.py | 4 +- ...nerated_config_service_v2_get_link_sync.py | 4 +- ...ed_config_service_v2_get_settings_async.py | 4 +- ...ted_config_service_v2_get_settings_sync.py | 4 +- ...erated_config_service_v2_get_sink_async.py | 4 +- ...nerated_config_service_v2_get_sink_sync.py | 4 +- ...erated_config_service_v2_get_view_async.py | 4 +- ...nerated_config_service_v2_get_view_sync.py | 4 +- ...ed_config_service_v2_list_buckets_async.py | 4 +- ...ted_config_service_v2_list_buckets_sync.py | 4 +- ...config_service_v2_list_exclusions_async.py | 4 +- ..._config_service_v2_list_exclusions_sync.py | 4 +- ...ated_config_service_v2_list_links_async.py | 4 +- ...rated_config_service_v2_list_links_sync.py | 4 +- ...ated_config_service_v2_list_sinks_async.py | 4 +- ...rated_config_service_v2_list_sinks_sync.py | 4 +- ...ated_config_service_v2_list_views_async.py | 4 +- ...rated_config_service_v2_list_views_sync.py | 4 +- ...config_service_v2_undelete_bucket_async.py | 4 +- ..._config_service_v2_undelete_bucket_sync.py | 4 +- ...d_config_service_v2_update_bucket_async.py | 4 +- ...ig_service_v2_update_bucket_async_async.py | 4 +- 
...fig_service_v2_update_bucket_async_sync.py | 4 +- ...ed_config_service_v2_update_bucket_sync.py | 4 +- ...g_service_v2_update_cmek_settings_async.py | 4 +- ...ig_service_v2_update_cmek_settings_sync.py | 4 +- ...onfig_service_v2_update_exclusion_async.py | 6 +- ...config_service_v2_update_exclusion_sync.py | 6 +- ...config_service_v2_update_settings_async.py | 4 +- ..._config_service_v2_update_settings_sync.py | 4 +- ...ted_config_service_v2_update_sink_async.py | 6 +- ...ated_config_service_v2_update_sink_sync.py | 6 +- ...ted_config_service_v2_update_view_async.py | 4 +- ...ated_config_service_v2_update_view_sync.py | 4 +- ...ted_logging_service_v2_delete_log_async.py | 4 +- ...ated_logging_service_v2_delete_log_sync.py | 4 +- ...gging_service_v2_list_log_entries_async.py | 4 +- ...ogging_service_v2_list_log_entries_sync.py | 4 +- ...ated_logging_service_v2_list_logs_async.py | 4 +- ...rated_logging_service_v2_list_logs_sync.py | 4 +- ...st_monitored_resource_descriptors_async.py | 4 +- ...ist_monitored_resource_descriptors_sync.py | 4 +- ...gging_service_v2_tail_log_entries_async.py | 6 +- ...ogging_service_v2_tail_log_entries_sync.py | 6 +- ...ging_service_v2_write_log_entries_async.py | 6 +- ...gging_service_v2_write_log_entries_sync.py | 6 +- ...rics_service_v2_create_log_metric_async.py | 6 +- ...trics_service_v2_create_log_metric_sync.py | 6 +- ...rics_service_v2_delete_log_metric_async.py | 4 +- ...trics_service_v2_delete_log_metric_sync.py | 4 +- ...metrics_service_v2_get_log_metric_async.py | 4 +- ..._metrics_service_v2_get_log_metric_sync.py | 4 +- ...trics_service_v2_list_log_metrics_async.py | 4 +- ...etrics_service_v2_list_log_metrics_sync.py | 4 +- ...rics_service_v2_update_log_metric_async.py | 6 +- ...trics_service_v2_update_log_metric_sync.py | 6 +- 87 files changed, 293 insertions(+), 188 deletions(-) diff --git a/owlbot.py b/owlbot.py index 3e932c854..8e3057207 100644 --- a/owlbot.py +++ b/owlbot.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import glob import json import os import shutil @@ -139,6 +140,110 @@ def place_before(path, text, *before_text, escape=None): python.py_samples() +# For autogenerated sample code, resolve object paths by finding the specific subpackage +# the object belongs to. This is because we leave out all autogenerated packages from the +# __init__.py of logging_v2. For now, this is manually copy-pasted from the __all__s of each +# subpackage's __init__.py. 
+gapic_objects = { + "logging_v2.services.config_service_v2": [ + "ConfigServiceV2Client", + "ConfigServiceV2AsyncClient" + ], + "logging_v2.services.logging_service_v2": [ + "LoggingServiceV2Client", + "LoggingServiceV2AsyncClient" + ], + "logging_v2.services.metrics_service_v2": [ + "MetricsServiceV2Client", + "MetricsServiceV2AsyncClient" + ], + "logging_v2.types": [ + "LogEntry", + "LogEntryOperation", + "LogEntrySourceLocation", + "LogSplit", + "DeleteLogRequest", + "ListLogEntriesRequest", + "ListLogEntriesResponse", + "ListLogsRequest", + "ListLogsResponse", + "ListMonitoredResourceDescriptorsRequest", + "ListMonitoredResourceDescriptorsResponse", + "TailLogEntriesRequest", + "TailLogEntriesResponse", + "WriteLogEntriesPartialErrors", + "WriteLogEntriesRequest", + "WriteLogEntriesResponse", + "BigQueryDataset", + "BigQueryOptions", + "BucketMetadata", + "CmekSettings", + "CopyLogEntriesMetadata", + "CopyLogEntriesRequest", + "CopyLogEntriesResponse", + "CreateBucketRequest", + "CreateExclusionRequest", + "CreateLinkRequest", + "CreateSinkRequest", + "CreateViewRequest", + "DeleteBucketRequest", + "DeleteExclusionRequest", + "DeleteLinkRequest", + "DeleteSinkRequest", + "DeleteViewRequest", + "GetBucketRequest", + "GetCmekSettingsRequest", + "GetExclusionRequest", + "GetLinkRequest", + "GetSettingsRequest", + "GetSinkRequest", + "GetViewRequest", + "IndexConfig", + "Link", + "LinkMetadata", + "ListBucketsRequest", + "ListBucketsResponse", + "ListExclusionsRequest", + "ListExclusionsResponse", + "ListLinksRequest", + "ListLinksResponse", + "ListSinksRequest", + "ListSinksResponse", + "ListViewsRequest", + "ListViewsResponse", + "LocationMetadata", + "LogBucket", + "LogExclusion", + "LogSink", + "LogView", + "Settings", + "UndeleteBucketRequest", + "UpdateBucketRequest", + "UpdateCmekSettingsRequest", + "UpdateExclusionRequest", + "UpdateSettingsRequest", + "UpdateSinkRequest", + "UpdateViewRequest", + "IndexType", + "LifecycleState", + "OperationState", + "CreateLogMetricRequest", + "DeleteLogMetricRequest", + "GetLogMetricRequest", + "ListLogMetricsRequest", + "ListLogMetricsResponse", + "LogMetric", + "UpdateLogMetricRequest" + ] +} + +sample_files = glob.glob("samples/generated_samples/logging_v2_*.py") +for subpackage_name in gapic_objects: + for object_name in gapic_objects[subpackage_name]: + text = "logging_v2." + object_name + replacement = subpackage_name + "." 
+ object_name + s.replace(sample_files, text, replacement) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) s.shell.run(["nox", "-s", "blacken"], cwd="samples/snippets", hide_output=False) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index 806e937dd..993293752 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -36,10 +36,10 @@ async def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CopyLogEntriesRequest( + request = logging_v2.types.CopyLogEntriesRequest( name="name_value", destination="destination_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index ca0209f00..b95b83ab4 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -36,10 +36,10 @@ def sample_copy_log_entries(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CopyLogEntriesRequest( + request = logging_v2.types.CopyLogEntriesRequest( name="name_value", destination="destination_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index c1f028fb0..089263531 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -36,10 +36,10 @@ async def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 8fe42df3c..8d55ee0bf 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -36,10 +36,10 @@ async def sample_create_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py 
b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index 1ce698784..9b71e2d74 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -36,10 +36,10 @@ def sample_create_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index dc73253f4..111a2d272 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -36,10 +36,10 @@ def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateBucketRequest( + request = logging_v2.types.CreateBucketRequest( parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 17490c61e..b59271905 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -36,14 +36,14 @@ async def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( + request = logging_v2.types.CreateExclusionRequest( parent="parent_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 75ec32f48..a3b20a5f3 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -36,14 +36,14 @@ def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( + request = logging_v2.types.CreateExclusionRequest( parent="parent_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py 
b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index 8ceb52985..c130fe56d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -36,10 +36,10 @@ async def sample_create_link(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateLinkRequest( + request = logging_v2.types.CreateLinkRequest( parent="parent_value", link_id="link_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index 604ff6626..ce3bbfd12 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -36,10 +36,10 @@ def sample_create_link(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateLinkRequest( + request = logging_v2.types.CreateLinkRequest( parent="parent_value", link_id="link_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 277e83055..c4deb526b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -36,14 +36,14 @@ async def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.CreateSinkRequest( + request = logging_v2.types.CreateSinkRequest( parent="parent_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index a4df02994..16db9a155 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -36,14 +36,14 @@ def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.CreateSinkRequest( + request = logging_v2.types.CreateSinkRequest( parent="parent_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 5cd201276..8eaba2353 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ 
b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -36,10 +36,10 @@ async def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.CreateViewRequest( + request = logging_v2.types.CreateViewRequest( parent="parent_value", view_id="view_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index cd3ca94e1..7f1f4a7dc 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -36,10 +36,10 @@ def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.CreateViewRequest( + request = logging_v2.types.CreateViewRequest( parent="parent_value", view_id="view_id_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index fcffb6db8..cb409bf4b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -36,10 +36,10 @@ async def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( + request = logging_v2.types.DeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index a8f902116..a31d04ceb 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -36,10 +36,10 @@ def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteBucketRequest( + request = logging_v2.types.DeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index b426d4703..6bd56016a 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -36,10 +36,10 @@ async def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.types.DeleteExclusionRequest( name="name_value", ) diff --git 
a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 5d98f782b..66c82b08b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -36,10 +36,10 @@ def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.types.DeleteExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index 8c7a934a7..9c47004ed 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -36,10 +36,10 @@ async def sample_delete_link(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLinkRequest( + request = logging_v2.types.DeleteLinkRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index dfa59b307..209651ad6 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -36,10 +36,10 @@ def sample_delete_link(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLinkRequest( + request = logging_v2.types.DeleteLinkRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 11d91947e..d8b4f4832 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -36,10 +36,10 @@ async def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( + request = logging_v2.types.DeleteSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index bf9875b0a..947fdf52d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -36,10 +36,10 @@ def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = 
logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteSinkRequest( + request = logging_v2.types.DeleteSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index fe9c7031e..1fe4e6dae 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -36,10 +36,10 @@ async def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteViewRequest( + request = logging_v2.types.DeleteViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index b5539c04f..6416ff773 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -36,10 +36,10 @@ def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteViewRequest( + request = logging_v2.types.DeleteViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index c29c35a4e..11ce2f13a 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -36,10 +36,10 @@ async def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetBucketRequest( + request = logging_v2.types.GetBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 818ab646f..ac8db3444 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -36,10 +36,10 @@ def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetBucketRequest( + request = logging_v2.types.GetBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 3ae13401d..660759e09 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ 
b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -36,10 +36,10 @@ async def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.types.GetCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 499d68bdb..eedf30d59 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -36,10 +36,10 @@ def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.types.GetCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 03b7dc7d8..a296e0bdd 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -36,10 +36,10 @@ async def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.types.GetExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index a445ed396..bd47eede1 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -36,10 +36,10 @@ def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.types.GetExclusionRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index ddc3d131f..efc87806d 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -36,10 +36,10 @@ async def sample_get_link(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetLinkRequest( + request = logging_v2.types.GetLinkRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py 
b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index 3a7643b3a..8db2ca310 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -36,10 +36,10 @@ def sample_get_link(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetLinkRequest( + request = logging_v2.types.GetLinkRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index 4ee968e81..0eb6fb853 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -36,10 +36,10 @@ async def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetSettingsRequest( + request = logging_v2.types.GetSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index a3e018440..b0290a2fb 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -36,10 +36,10 @@ def sample_get_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetSettingsRequest( + request = logging_v2.types.GetSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index b6fe5b11e..694d6ddab 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -36,10 +36,10 @@ async def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetSinkRequest( + request = logging_v2.types.GetSinkRequest( sink_name="sink_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index ecebaf119..2a0f1c100 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -36,10 +36,10 @@ def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetSinkRequest( + request = logging_v2.types.GetSinkRequest( sink_name="sink_name_value", 
) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5992e53ee..f0438a0a1 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -36,10 +36,10 @@ async def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetViewRequest( + request = logging_v2.types.GetViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index 14d8679bc..f0e60b745 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -36,10 +36,10 @@ def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetViewRequest( + request = logging_v2.types.GetViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index dfbc3b411..883810c4e 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -36,10 +36,10 @@ async def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListBucketsRequest( + request = logging_v2.types.ListBucketsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 8626f7cae..641d8f6b6 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -36,10 +36,10 @@ def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListBucketsRequest( + request = logging_v2.types.ListBucketsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index ad42edff5..444ca9c5b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -36,10 +36,10 @@ async def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = 
logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.types.ListExclusionsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 727723a7a..ec66239ed 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -36,10 +36,10 @@ def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.types.ListExclusionsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index 7eccffaa6..cad31c4d4 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -36,10 +36,10 @@ async def sample_list_links(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLinksRequest( + request = logging_v2.types.ListLinksRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index a2f98d69d..ec752eda8 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -36,10 +36,10 @@ def sample_list_links(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLinksRequest( + request = logging_v2.types.ListLinksRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index b642d38ee..83754a238 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -36,10 +36,10 @@ async def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListSinksRequest( + request = logging_v2.types.ListSinksRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index b4fc92452..d79a68b02 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ 
b/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -36,10 +36,10 @@ def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListSinksRequest( + request = logging_v2.types.ListSinksRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 1542a5a38..1a36ac665 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -36,10 +36,10 @@ async def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListViewsRequest( + request = logging_v2.types.ListViewsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index b273c465d..1fdb4e9c1 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -36,10 +36,10 @@ def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListViewsRequest( + request = logging_v2.types.ListViewsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index d2695708d..52001dd74 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -36,10 +36,10 @@ async def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.types.UndeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 8d25c7d33..9e04ebadc 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -36,10 +36,10 @@ def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UndeleteBucketRequest( + request = logging_v2.types.UndeleteBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py 
b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index e1c741b67..6bebb3792 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -36,10 +36,10 @@ async def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 7dde59dcd..8f0b5b107 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -36,10 +36,10 @@ async def sample_update_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index 2ecaf8df2..7c6c37160 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -36,10 +36,10 @@ def sample_update_bucket_async(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 7b4a3c597..d1f37e929 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -36,10 +36,10 @@ def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateBucketRequest( + request = logging_v2.types.UpdateBucketRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 96fc8ff97..89fb901e5 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -36,10 +36,10 @@ async def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = 
logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.types.UpdateCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 9bbc7dcb1..31b5415fc 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -36,10 +36,10 @@ def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.types.UpdateCmekSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index d8b8d7f7b..7df03d1e7 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -36,14 +36,14 @@ async def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.types.UpdateExclusionRequest( name="name_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 36d5776e3..cc17ec23c 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -36,14 +36,14 @@ def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + exclusion = logging_v2.types.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.types.UpdateExclusionRequest( name="name_value", exclusion=exclusion, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index b51dd81cc..1242c3cfb 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -36,10 +36,10 @@ async def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request 
argument(s) - request = logging_v2.UpdateSettingsRequest( + request = logging_v2.types.UpdateSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 1e7aefce8..3edc24c96 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -36,10 +36,10 @@ def sample_update_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateSettingsRequest( + request = logging_v2.types.UpdateSettingsRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index aef847379..d97391677 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -36,14 +36,14 @@ async def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.UpdateSinkRequest( + request = logging_v2.types.UpdateSinkRequest( sink_name="sink_name_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index e84230202..ec2ff7fbf 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -36,14 +36,14 @@ def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - sink = logging_v2.LogSink() + sink = logging_v2.types.LogSink() sink.name = "name_value" sink.destination = "destination_value" - request = logging_v2.UpdateSinkRequest( + request = logging_v2.types.UpdateSinkRequest( sink_name="sink_name_value", sink=sink, ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index f143a772c..949b9d98b 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -36,10 +36,10 @@ async def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2AsyncClient() + client = logging_v2.services.config_service_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateViewRequest( + request = logging_v2.types.UpdateViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py 
b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 3867d3702..538908484 100644 --- a/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -36,10 +36,10 @@ def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.services.config_service_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateViewRequest( + request = logging_v2.types.UpdateViewRequest( name="name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index c6469053b..7032872fa 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -36,10 +36,10 @@ async def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLogRequest( + request = logging_v2.types.DeleteLogRequest( log_name="log_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 1e4e28abc..12124e531 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -36,10 +36,10 @@ def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLogRequest( + request = logging_v2.types.DeleteLogRequest( log_name="log_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index d5cfe190c..e310819b7 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -36,10 +36,10 @@ async def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogEntriesRequest( + request = logging_v2.types.ListLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d24923cb1..7e20ad165 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -36,10 +36,10 @@ def sample_list_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = 
logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogEntriesRequest( + request = logging_v2.types.ListLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 71859024d..3149daeb1 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -36,10 +36,10 @@ async def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogsRequest( + request = logging_v2.types.ListLogsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 5a5ff140c..04441e671 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -36,10 +36,10 @@ def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogsRequest( + request = logging_v2.types.ListLogsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 519a2498a..a18674441 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -36,10 +36,10 @@ async def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( + request = logging_v2.types.ListMonitoredResourceDescriptorsRequest( ) # Make the request diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index ca97be4b3..399bf369a 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -36,10 +36,10 @@ def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListMonitoredResourceDescriptorsRequest( + request = logging_v2.types.ListMonitoredResourceDescriptorsRequest( ) # Make the request 
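
The hunks above all apply the same mechanical change to the generated samples: clients are now constructed through their fully-qualified service module (logging_v2.services.<service>.<Client>) and request messages through logging_v2.types, instead of the top-level logging_v2 shortcuts. A minimal sketch of the resulting sample shape follows; the import line and the trailing RPC call are assumptions taken from the usual generated-sample layout, since they sit outside the ten-line hunk context shown in these diffs.

# Sketch of the post-change sample shape (sync GetSink). Only the client and
# request constructor lines are confirmed by the hunks above; the import and
# the final client.get_sink(...) call are assumed from the surrounding
# generated sample code that the diffs do not show.
from google.cloud import logging_v2


def sample_get_sink():
    # Create a client via the fully-qualified service module
    client = logging_v2.services.config_service_v2.ConfigServiceV2Client()

    # Initialize request argument(s) via the types module
    request = logging_v2.types.GetSinkRequest(
        sink_name="sink_name_value",
    )

    # Make the request (assumed call, not part of the hunk context above)
    response = client.get_sink(request=request)
    print(response)

The async variants in these diffs follow the same pattern with the corresponding AsyncClient (for example, ConfigServiceV2AsyncClient), presumably awaiting the RPC call in the same way.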
diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 24e9e2009..1ce36bba4 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -36,15 +36,15 @@ async def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.TailLogEntriesRequest( + request = logging_v2.types.TailLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # 'logging_v2.types.TailLogEntriesRequest' objects # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index dc9a545e7..1756dccec 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -36,15 +36,15 @@ def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - request = logging_v2.TailLogEntriesRequest( + request = logging_v2.types.TailLogEntriesRequest( resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains - # 'logging_v2.TailLogEntriesRequest' objects + # 'logging_v2.types.TailLogEntriesRequest' objects # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 1a0d48664..eb377d226 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -36,13 +36,13 @@ async def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2AsyncClient() + client = logging_v2.services.logging_service_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - entries = logging_v2.LogEntry() + entries = logging_v2.types.LogEntry() entries.log_name = "log_name_value" - request = logging_v2.WriteLogEntriesRequest( + request = logging_v2.types.WriteLogEntriesRequest( entries=entries, ) diff --git a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index de4bfe6c8..4d30f92fb 100644 --- a/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -36,13 +36,13 @@ def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.services.logging_service_v2.LoggingServiceV2Client() # Initialize request argument(s) - entries = logging_v2.LogEntry() + entries = logging_v2.types.LogEntry() entries.log_name = "log_name_value" - request = logging_v2.WriteLogEntriesRequest( + request = logging_v2.types.WriteLogEntriesRequest( entries=entries, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 557d32293..9af902280 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -36,14 +36,14 @@ async def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.CreateLogMetricRequest( + request = logging_v2.types.CreateLogMetricRequest( parent="parent_value", metric=metric, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index b9e9cade9..a0a68cfed 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -36,14 +36,14 @@ def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = 
logging_v2.CreateLogMetricRequest( + request = logging_v2.types.CreateLogMetricRequest( parent="parent_value", metric=metric, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index fea40e7a4..0d0f9f4c8 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -36,10 +36,10 @@ async def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteLogMetricRequest( + request = logging_v2.types.DeleteLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index ccf2983be..5452c586f 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -36,10 +36,10 @@ def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteLogMetricRequest( + request = logging_v2.types.DeleteLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 4b6984171..53f9e5b06 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -36,10 +36,10 @@ async def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetLogMetricRequest( + request = logging_v2.types.GetLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index abb071c65..26409d6d9 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -36,10 +36,10 @@ def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetLogMetricRequest( + request = logging_v2.types.GetLogMetricRequest( metric_name="metric_name_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index f280ec9de..325cf4d44 100644 --- 
a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -36,10 +36,10 @@ async def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListLogMetricsRequest( + request = logging_v2.types.ListLogMetricsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index bffbe10a8..9442a7a01 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -36,10 +36,10 @@ def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListLogMetricsRequest( + request = logging_v2.types.ListLogMetricsRequest( parent="parent_value", ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 59bfeeaaa..047ae2c86 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -36,14 +36,14 @@ async def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2AsyncClient() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.UpdateLogMetricRequest( + request = logging_v2.types.UpdateLogMetricRequest( metric_name="metric_name_value", metric=metric, ) diff --git a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index ed4dd0126..583fa4c7b 100644 --- a/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -36,14 +36,14 @@ def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.services.metrics_service_v2.MetricsServiceV2Client() # Initialize request argument(s) - metric = logging_v2.LogMetric() + metric = logging_v2.types.LogMetric() metric.name = "name_value" metric.filter = "filter_value" - request = logging_v2.UpdateLogMetricRequest( + request = logging_v2.types.UpdateLogMetricRequest( metric_name="metric_name_value", metric=metric, ) From c3839602af188e209da1c600d0076ff0d746ed58 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 21:23:28 -0400 Subject: [PATCH 07/19] chore: update docfx minimum Python version (#810) Source-Link: 
https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7f291dbd5..ec696b558 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/noxfile.py b/noxfile.py index 565df040b..6f651e5ec 100644 --- a/noxfile.py +++ b/noxfile.py @@ -310,7 +310,7 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" From d73cc56ba49d13d2c876c9dbf5c76b2308012161 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Nov 2023 17:36:56 -0500 Subject: [PATCH 08/19] chore: bump urllib3 from 1.26.12 to 1.26.18 (#811) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/requirements.txt | 532 ++++++++++++++++++++------------------ 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ec696b558..453b540c1 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 16170d0ca..8957e2110 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - 
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - 
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + 
--hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - 
--hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + 
--hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a +google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core 
idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - 
--hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + 
--hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + 
--hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 +411,30 @@ protobuf==3.20.3 \ # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - 
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 
+rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r 
requirements.in From 10ad75d2b9276df389f5069f9f143f8f4621d04d Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 29 Nov 2023 12:41:31 -0500 Subject: [PATCH 09/19] feat: use native namespaces instead of pkg_resources (#812) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: use native namespaces instead of pkg_resources * linting * Added packaging test for native namespace support. * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- google/__init__.py | 22 -------------- google/cloud/__init__.py | 22 -------------- setup.py | 7 +---- tests/unit/test_packaging.py | 56 ++++++++++++++++++++++++++++++++++++ 4 files changed, 57 insertions(+), 50 deletions(-) delete mode 100644 google/__init__.py delete mode 100644 google/cloud/__init__.py create mode 100644 tests/unit/test_packaging.py diff --git a/google/__init__.py b/google/__init__.py deleted file mode 100644 index 0e1bc5131..000000000 --- a/google/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/google/cloud/__init__.py b/google/cloud/__init__.py deleted file mode 100644 index 0e1bc5131..000000000 --- a/google/cloud/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/setup.py b/setup.py index f43fd0bf9..e4a71277a 100644 --- a/setup.py +++ b/setup.py @@ -55,14 +55,10 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -89,7 +85,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py new file mode 100644 index 000000000..4369ca2c1 --- /dev/null +++ b/tests/unit/test_packaging.py @@ -0,0 +1,56 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-logging``. + + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + + google_otherpkg = tmp_path / "google" / "otherpkg" + google_otherpkg.mkdir() + google_otherpkg.joinpath("__init__.py").write_text("") + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-logging``. 
+ google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + + google_cloud_otherpkg = tmp_path / "google" / "cloud" / "otherpkg" + google_cloud_otherpkg.mkdir() + google_cloud_otherpkg.joinpath("__init__.py").write_text("") + + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + + for pkg in [ + "google.othermod", + "google.cloud.othermod", + "google.otherpkg", + "google.cloud.otherpkg", + "google.cloud.logging", + ]: + cmd = [sys.executable, "-c", f"import {pkg}"] + subprocess.check_output(cmd, env=env) + + for module in ["google.othermod", "google.cloud.othermod"]: + cmd = [sys.executable, "-m", module] + subprocess.check_output(cmd, env=env) From 6591b53e3fcd67e156765f329700443647b70349 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 15:19:46 -0500 Subject: [PATCH 10/19] feat: Add support for Python 3.12 (#813) * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * add trove classifier for python 3.12 * add python 3.12, and older, as a required check --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 +- .github/sync-repo-settings.yaml | 4 ++ .kokoro/samples/python3.12/common.cfg | 59 ++++++++++++++++++++ .kokoro/samples/python3.12/continuous.cfg | 6 ++ .kokoro/samples/python3.12/periodic-head.cfg | 11 ++++ .kokoro/samples/python3.12/periodic.cfg | 6 ++ .kokoro/samples/python3.12/presubmit.cfg | 6 ++ CONTRIBUTING.rst | 6 +- noxfile.py | 2 +- samples/snippets/noxfile.py | 2 +- setup.py | 1 + 11 files changed, 101 insertions(+), 6 deletions(-) create mode 100644 .kokoro/samples/python3.12/common.cfg create mode 100644 .kokoro/samples/python3.12/continuous.cfg create mode 100644 .kokoro/samples/python3.12/periodic-head.cfg create mode 100644 .kokoro/samples/python3.12/periodic.cfg create mode 100644 .kokoro/samples/python3.12/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 453b540c1..eb4d9f794 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 +# created: 2023-11-23T18:17:28.105124211Z diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 37438d33d..439a0bcb7 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -12,3 +12,7 @@ branchProtectionRules: - 'Samples - Lint' - 'Samples - Python 3.7' - 'Samples - Python 3.8' + - 'Samples - Python 3.9' + - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 000000000..fb8ce8795 --- /dev/null +++ b/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,59 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-logging/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "logging" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000..7e2973e3b --- /dev/null +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-logging/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 6fa7a4dac..f5be18c3d 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.12 -- -k .. note:: @@ -226,12 +226,14 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/noxfile.py b/noxfile.py index 6f651e5ec..9cff1ae84 100644 --- a/noxfile.py +++ b/noxfile.py @@ -34,7 +34,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 1224cbe21..3b7135946 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/setup.py b/setup.py index e4a71277a..978175d3a 100644 --- a/setup.py +++ b/setup.py @@ -79,6 +79,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], From c79f7f55dddb170eac29f24b23bfe1dde8bfbda8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 10:43:08 -0500 Subject: [PATCH 11/19] fix: use `retry_async` instead of `retry` in async client (#816) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.13.0 PiperOrigin-RevId: 586460538 Source-Link: https://github.com/googleapis/googleapis/commit/44582d0577fdc95dd2af37628a0569e16aac0bfe Source-Link: https://github.com/googleapis/googleapis-gen/commit/5e7073c9de847929c4ae97f8a444c3fca2d45a6b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWU3MDczYzlkZTg0NzkyOWM0YWU5N2Y4YTQ0NGMzZmNhMmQ0NWE2YiJ9 chore: Update gapic-generator-python to v1.12.0 PiperOrigin-RevId: 586356061 Source-Link: https://github.com/googleapis/googleapis/commit/72a1f55abaedbb62decd8ae8a44a4de223799c76 Source-Link: https://github.com/googleapis/googleapis-gen/commit/558a04bcd1cc0576e8fac1089e48e48b27ac161b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU4YTA0YmNkMWNjMDU3NmU4ZmFjMTA4OWU0OGU0OGIyN2FjMTYxYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 96 +++++++++---------- .../logging_service_v2/async_client.py | 42 ++++---- .../metrics_service_v2/async_client.py | 36 +++---- .../snippet_metadata_google.logging.v2.json | 2 +- .../logging_v2/test_config_service_v2.py | 6 +- .../logging_v2/test_logging_service_v2.py | 6 +- .../logging_v2/test_metrics_service_v2.py | 6 +- 7 files changed, 97 insertions(+), 97 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index e066569f7..3962c40e9 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from 
google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -294,7 +294,7 @@ async def sample_list_buckets(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -399,7 +399,7 @@ async def sample_get_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): The request object. The parameters to ``GetBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -486,7 +486,7 @@ async def sample_create_bucket_async(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -586,7 +586,7 @@ async def sample_update_bucket_async(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -679,7 +679,7 @@ async def sample_create_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -764,7 +764,7 @@ async def sample_update_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -845,7 +845,7 @@ async def sample_delete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): The request object. The parameters to ``DeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -914,7 +914,7 @@ async def sample_undelete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): The request object. The parameters to ``UndeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -996,7 +996,7 @@ async def sample_list_views(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1101,7 +1101,7 @@ async def sample_get_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): The request object. The parameters to ``GetView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1182,7 +1182,7 @@ async def sample_create_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): The request object. The parameters to ``CreateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1265,7 +1265,7 @@ async def sample_update_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): The request object. The parameters to ``UpdateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1344,7 +1344,7 @@ async def sample_delete_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): The request object. The parameters to ``DeleteView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1430,7 +1430,7 @@ async def sample_list_sinks(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1465,7 +1465,7 @@ async def sample_list_sinks(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_sinks, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1563,7 +1563,7 @@ async def sample_get_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1603,7 +1603,7 @@ async def sample_get_sink(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1711,7 +1711,7 @@ async def sample_create_sink(): This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1873,7 +1873,7 @@ async def sample_update_sink(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1917,7 +1917,7 @@ async def sample_update_sink(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2007,7 +2007,7 @@ async def sample_delete_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2034,7 +2034,7 @@ async def sample_delete_sink(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2142,7 +2142,7 @@ async def sample_create_link(): This corresponds to the ``link_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2267,7 +2267,7 @@ async def sample_delete_link(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2392,7 +2392,7 @@ async def sample_list_links(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2509,7 +2509,7 @@ async def sample_get_link(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2619,7 +2619,7 @@ async def sample_list_exclusions(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2654,7 +2654,7 @@ async def sample_list_exclusions(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_exclusions, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2752,7 +2752,7 @@ async def sample_get_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2790,7 +2790,7 @@ async def sample_get_exclusion(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_exclusion, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2897,7 +2897,7 @@ async def sample_create_exclusion(): This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3045,7 +3045,7 @@ async def sample_update_exclusion(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3163,7 +3163,7 @@ async def sample_delete_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3190,7 +3190,7 @@ async def sample_delete_exclusion(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_exclusion, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -3272,7 +3272,7 @@ async def sample_get_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3380,7 +3380,7 @@ async def sample_update_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3509,7 +3509,7 @@ async def sample_get_settings(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3651,7 +3651,7 @@ async def sample_update_settings(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3753,7 +3753,7 @@ async def sample_copy_log_entries(): Args: request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): The request object. The parameters to CopyLogEntries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3813,7 +3813,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3830,7 +3830,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -3867,7 +3867,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3884,7 +3884,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -3925,7 +3925,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3941,7 +3941,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index dcf622ac2..59dcad291 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -36,14 +36,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -278,7 +278,7 @@ async def sample_delete_log(): This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -305,7 +305,7 @@ async def sample_delete_log(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_log, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -472,7 +472,7 @@ async def sample_write_log_entries(): This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -510,7 +510,7 @@ async def sample_write_log_entries(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.write_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -629,7 +629,7 @@ async def sample_list_log_entries(): This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -668,7 +668,7 @@ async def sample_list_log_entries(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -746,7 +746,7 @@ async def sample_list_monitored_resource_descriptors(): request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -768,7 +768,7 @@ async def sample_list_monitored_resource_descriptors(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -857,7 +857,7 @@ async def sample_list_logs(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -893,7 +893,7 @@ async def sample_list_logs(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_logs, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -986,7 +986,7 @@ def request_generator(): Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to ``TailLogEntries``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1001,7 +1001,7 @@ def request_generator(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.tail_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1041,7 +1041,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1058,7 +1058,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1095,7 +1095,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1112,7 +1112,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1153,7 +1153,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1169,7 +1169,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index a120c352b..b2cf3e3d7 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -269,7 +269,7 @@ async def sample_list_log_metrics(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -305,7 +305,7 @@ async def sample_list_log_metrics(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_log_metrics, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -396,7 +396,7 @@ async def sample_get_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -439,7 +439,7 @@ async def sample_get_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -540,7 +540,7 @@ async def sample_create_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -672,7 +672,7 @@ async def sample_update_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -717,7 +717,7 @@ async def sample_update_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -798,7 +798,7 @@ async def sample_delete_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -825,7 +825,7 @@ async def sample_delete_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -870,7 +870,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -887,7 +887,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -924,7 +924,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -941,7 +941,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -982,7 +982,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -998,7 +998,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/samples/generated_samples/snippet_metadata_google.logging.v2.json b/samples/generated_samples/snippet_metadata_google.logging.v2.json index 6c11ae7e5..b62675ba6 100644 --- a/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "3.8.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1af573470..abe89b5ae 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -9112,7 +9112,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9251,7 +9251,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -9396,7 +9396,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index ba5e56f22..498ad94af 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -3013,7 +3013,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = 
"grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3152,7 +3152,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3297,7 +3297,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 00e443415..f1d5ba3a3 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2899,7 +2899,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3038,7 +3038,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3183,7 +3183,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, From 8e256dc057299b15d245c61fe330f00a2c14ceb4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 13:47:47 -0500 Subject: [PATCH 12/19] chore: bump cryptography from 41.0.5 to 41.0.6 in /synthtool/gcp/templates/python_library/.kokoro (#815) Source-Link: https://github.com/googleapis/synthtool/commit/9367caadcbb30b5b2719f30eb00c44cc913550ed Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 48 +++++++++++++++++++-------------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index eb4d9f794..773c1dfd2 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 -# created: 2023-11-23T18:17:28.105124211Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8957e2110..e5c1ffca9 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + 
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From d3dc2ac9df47087997dff0d516d676e8275b5ec4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 14:22:16 -0500 Subject: [PATCH 13/19] chore(deps): bump cryptography from 41.0.5 to 41.0.6 in /.kokoro (#814) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): bump cryptography from 41.0.5 to 41.0.6 in /.kokoro Bumps [cryptography](https://github.com/pyca/cryptography) from 41.0.5 to 41.0.6. - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/41.0.5...41.0.6) --- updated-dependencies: - dependency-name: cryptography dependency-type: indirect ... Signed-off-by: dependabot[bot] * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> From 0ba8220cd0b94fcd638be9931cd9093113d12111 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 4 Dec 2023 10:56:49 -0500 Subject: [PATCH 14/19] build: treat warnings as errors (#819) * build: treat warnings as errors * resolve warning Client.dataset is deprecated and will be removed in a future version * See https://github.com/googleapis/python-logging/issues/820 * address warning @pytest.yield_fixture is deprecated. Use @pytest.fixture instead; they are the same. 
* filter warnings from grpcio * revert * update comment --- pytest.ini | 23 ++++++++++++++++ samples/snippets/export_test.py | 2 +- tests/system/test_system.py | 2 +- tests/unit/handlers/test_app_engine.py | 29 +++++++++++++++++--- tests/unit/handlers/test_container_engine.py | 25 ++++++++++++++--- 5 files changed, 71 insertions(+), 10 deletions(-) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..994e939cb --- /dev/null +++ b/pytest.ini @@ -0,0 +1,23 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning + # Remove once Release PR https://github.com/googleapis/python-api-common-protos/pull/191 is merged + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed + ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel + # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers + # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers + # Remove once https://github.com/googleapis/python-logging/issues/818 is fixed + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:google.cloud.logging_v2.handlers.transports + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test__http + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries + # Remove once https://github.com/googleapis/python-logging/issues/820 is fixed + ignore:.*warn.*is deprecated, use.*warning.*instead:DeprecationWarning + # Remove once a version of grpcio newer than 1.59.3 is released to PyPI + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel diff --git a/samples/snippets/export_test.py b/samples/snippets/export_test.py index b1ecf4923..c5830e3b2 100644 --- a/samples/snippets/export_test.py +++ b/samples/snippets/export_test.py @@ -34,7 +34,7 @@ def _random_id(): ) -@pytest.yield_fixture +@pytest.fixture def example_sink(): client = logging.Client() diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 0d39aa0a9..ec67a99d0 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -813,7 +813,7 @@ def _init_bigquery_dataset(self): # Stackdriver Logging to write into it. retry = RetryErrors((TooManyRequests, BadGateway, ServiceUnavailable)) bigquery_client = bigquery.Client() - dataset_ref = bigquery_client.dataset(dataset_name) + dataset_ref = bigquery.DatasetReference(Config.CLIENT.project, dataset_name) dataset = retry(bigquery_client.create_dataset)(bigquery.Dataset(dataset_ref)) self.to_delete.append((bigquery_client, dataset)) bigquery_client.get_dataset(dataset) diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py index 8eedfad9b..868fc9be8 100644 --- a/tests/unit/handlers/test_app_engine.py +++ b/tests/unit/handlers/test_app_engine.py @@ -13,6 +13,7 @@ # limitations under the License. 
import logging +import pytest import unittest import mock @@ -46,6 +47,9 @@ def test_constructor_w_gae_standard_env(self): ), mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value=self.PROJECT, + ), pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", ): handler = self._make_one(client, transport=_Transport) @@ -78,6 +82,9 @@ def test_constructor_w_gae_flex_env(self): ), mock.patch( "google.cloud.logging_v2.handlers._monitored_resources.retrieve_metadata_server", return_value=self.PROJECT, + ), pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", ): handler = self._make_one( client, name=name, transport=_Transport, stream=stream @@ -99,7 +106,10 @@ def test_emit(self): "google.cloud.logging_v2.handlers.app_engine.get_request_data", return_value=(expected_http_request, trace_id, None, None), ) - with get_request_patch: + with get_request_patch, pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): # library integrations mocked to return test data client = mock.Mock(project=self.PROJECT, spec=["project"]) handler = self._make_one(client, transport=_Transport) @@ -137,7 +147,10 @@ def test_emit_manual_field_override(self): "google.cloud.logging_v2.handlers.app_engine.get_request_data", return_value=(inferred_http_request, inferred_trace_id, None, None), ) - with get_request_patch: + with get_request_patch, pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): # library integrations mocked to return test data client = mock.Mock(project=self.PROJECT, spec=["project"]) handler = self._make_one(client, transport=_Transport) @@ -197,12 +210,20 @@ def test_get_gae_labels_with_label(self): from google.cloud.logging_v2.handlers import app_engine trace_id = "test-gae-trace-id" - gae_labels = self._get_gae_labels_helper(trace_id) + with pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): + gae_labels = self._get_gae_labels_helper(trace_id) expected_labels = {app_engine._TRACE_ID_LABEL: trace_id} self.assertEqual(gae_labels, expected_labels) def test_get_gae_labels_without_label(self): - gae_labels = self._get_gae_labels_helper(None) + with pytest.warns( + DeprecationWarning, + match="AppEngineHandler is deprecated. Use CloudLoggingHandler instead", + ): + gae_labels = self._get_gae_labels_helper(None) self.assertEqual(gae_labels, {}) diff --git a/tests/unit/handlers/test_container_engine.py b/tests/unit/handlers/test_container_engine.py index 280ab9cf0..5c814c53d 100644 --- a/tests/unit/handlers/test_container_engine.py +++ b/tests/unit/handlers/test_container_engine.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest import unittest @@ -27,18 +28,30 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - handler = self._make_one() + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one() self.assertIsNone(handler.name) def test_ctor_w_name(self): - handler = self._make_one(name="foo") + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. 
Use StructuredLogHandler instead", + ): + handler = self._make_one(name="foo") self.assertEqual(handler.name, "foo") def test_format(self): import logging import json - handler = self._make_one() + with pytest.warns( + DeprecationWarning, + match="ContainerEngineHandler is deprecated. Use StructuredLogHandler instead", + ): + handler = self._make_one() logname = "loggername" message = "hello world,嗨 世界" record = logging.LogRecord( @@ -51,6 +64,10 @@ def test_format(self): "thread": record.thread, "severity": record.levelname, } - payload = handler.format(record) + with pytest.warns( + DeprecationWarning, + match="format_stackdriver_json is deprecated. Use StructuredLogHandler instead", + ): + payload = handler.format(record) self.assertEqual(payload, json.dumps(expected_payload, ensure_ascii=False)) From c4471758e1efee0e3599b08969449b2ce71bd1b4 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 5 Dec 2023 13:39:57 -0500 Subject: [PATCH 15/19] fix: Use warning instead of warn in system tests to avoid DeprecationWarning (#821) * fix: Use warning instead of warn in system tests to avoid DeprecationWarning * Removed ignore like from pytest.ini --- pytest.ini | 2 -- tests/system/test_system.py | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/pytest.ini b/pytest.ini index 994e939cb..8bc54e713 100644 --- a/pytest.ini +++ b/pytest.ini @@ -17,7 +17,5 @@ filterwarnings = ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:google.cloud.logging_v2.handlers.transports ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test__http ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries - # Remove once https://github.com/googleapis/python-logging/issues/820 is fixed - ignore:.*warn.*is deprecated, use.*warning.*instead:DeprecationWarning # Remove once a version of grpcio newer than 1.59.3 is released to PyPI ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel diff --git a/tests/system/test_system.py b/tests/system/test_system.py index ec67a99d0..821a938df 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -605,7 +605,7 @@ def test_handlers_w_extras(self): "resource": Resource(type="cloudiot_device", labels={}), "labels": {"test-label": "manual"}, } - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) @@ -634,7 +634,7 @@ def test_handlers_w_json_fields(self): cloud_logger = logging.getLogger(LOGGER_NAME) cloud_logger.addHandler(handler) extra = {"json_fields": {"hello": "world", "two": 2}} - cloud_logger.warn(LOG_MESSAGE, extra=extra) + cloud_logger.warning(LOG_MESSAGE, extra=extra) entries = _list_entries(logger) self.assertEqual(len(entries), 1) From af76e38fa9321131b4cc0a4760a1b65029bd6225 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 6 Dec 2023 11:05:28 -0500 Subject: [PATCH 16/19] fix: Ignore Python37DeprecationWarnings from google.auth (#823) --- pytest.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pytest.ini b/pytest.ini index 8bc54e713..1d4be1ee6 100644 --- a/pytest.ini +++ b/pytest.ini @@ -19,3 +19,5 @@ filterwarnings = ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries # Remove once a version of grpcio newer than 1.59.3 is released to PyPI 
ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel + # Remove after support for Python 3.7 is dropped + ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning From 2384981c9137a57a647a69a32b67dcacd619ea0a Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Fri, 8 Dec 2023 11:02:56 -0500 Subject: [PATCH 17/19] fix: Fixed DeprecationWarning for datetime objects for Python 3.12 (#824) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Fixed DeprecationWarning for datetime objects for Python 3.12 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../handlers/transports/background_thread.py | 4 +++- pytest.ini | 4 ---- tests/system/test_system.py | 4 ++-- tests/unit/test__http.py | 4 ++-- tests/unit/test_entries.py | 12 ++++++------ 5 files changed, 13 insertions(+), 15 deletions(-) diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py b/google/cloud/logging_v2/handlers/transports/background_thread.py index f361e043c..7cf2799f5 100644 --- a/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -240,7 +240,9 @@ def enqueue(self, record, message, **kwargs): queue_entry = { "message": message, "severity": _helpers._normalize_severity(record.levelno), - "timestamp": datetime.datetime.utcfromtimestamp(record.created), + "timestamp": datetime.datetime.fromtimestamp( + record.created, datetime.timezone.utc + ), } queue_entry.update(kwargs) self._queue.put_nowait(queue_entry) diff --git a/pytest.ini b/pytest.ini index 1d4be1ee6..15e373380 100644 --- a/pytest.ini +++ b/pytest.ini @@ -13,10 +13,6 @@ filterwarnings = ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers - # Remove once https://github.com/googleapis/python-logging/issues/818 is fixed - ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:google.cloud.logging_v2.handlers.transports - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test__http - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:tests.unit.test_entries # Remove once a version of grpcio newer than 1.59.3 is released to PyPI ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel # Remove after support for Python 3.7 is dropped diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 821a938df..c5000f146 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -336,7 +336,7 @@ def test_log_text_with_timestamp(self): text_payload = "System test: test_log_text_with_timestamp" gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) - now = datetime.utcnow() + now = datetime.now(timezone.utc) loggers = ( [gapic_logger] if Config.use_mtls == "always" @@ -356,7 +356,7 @@ def test_log_text_with_resource(self): gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) http_logger = 
Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) - now = datetime.utcnow() + now = datetime.now(timezone.utc) loggers = ( [gapic_logger] if Config.use_mtls == "always" diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index 0e83bd82c..5709a50a6 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -122,9 +122,9 @@ def test_ctor(self): @staticmethod def _make_timestamp(): import datetime - from google.cloud._helpers import UTC + from datetime import timezone - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.datetime.now(timezone.utc) return NOW, _datetime_to_rfc3339_w_nanos(NOW) def test_list_entries_with_limits(self): diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index e7bf30d87..382674ebd 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -200,14 +200,14 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime - from google.cloud._helpers import UTC + from datetime import timezone from google.cloud.logging import Resource klass = self._get_target_class() client = _Client(self.PROJECT) SEVERITY = "CRITICAL" IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) LABELS = {"foo": "bar", "baz": "qux"} @@ -283,11 +283,11 @@ def test_from_api_repr_w_loggers_no_logger_match(self): def test_from_api_repr_w_loggers_w_logger_match(self): from datetime import datetime from datetime import timedelta - from google.cloud._helpers import UTC + from datetime import timezone client = _Client(self.PROJECT) IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) @@ -341,11 +341,11 @@ def test_from_api_repr_w_loggers_w_logger_match(self): def test_from_api_repr_w_folder_path(self): from datetime import datetime from datetime import timedelta - from google.cloud._helpers import UTC + from datetime import timezone client = _Client(self.PROJECT) IID = "IID" - NOW = datetime.utcnow().replace(tzinfo=UTC) + NOW = datetime.now(timezone.utc) LATER = NOW + timedelta(seconds=1) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) From f9392fd586524f525a107eda9a362741d005b9ce Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 16:03:34 +0000 Subject: [PATCH 18/19] chore(main): release 3.9.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 17 +++++++++++++++++ google/cloud/logging/gapic_version.py | 2 +- google/cloud/logging_v2/gapic_version.py | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 5 files changed, 21 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index fa4291eb2..7c3079b2d 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.8.0" + ".": "3.9.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index d8cfadcaa..0ae1f74bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.9.0](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0) (2023-12-08) + + +### Features + +* Add 
support for Python 3.12 ([#813](https://github.com/googleapis/python-logging/issues/813)) ([6591b53](https://github.com/googleapis/python-logging/commit/6591b53e3fcd67e156765f329700443647b70349)) +* Use native namespaces instead of pkg_resources ([#812](https://github.com/googleapis/python-logging/issues/812)) ([10ad75d](https://github.com/googleapis/python-logging/commit/10ad75d2b9276df389f5069f9f143f8f4621d04d)) + + +### Bug Fixes + +* Fixed DeprecationWarning for datetime objects for Python 3.12 ([#824](https://github.com/googleapis/python-logging/issues/824)) ([2384981](https://github.com/googleapis/python-logging/commit/2384981c9137a57a647a69a32b67dcacd619ea0a)) +* Fixed object paths in autogenerated code in owlbot.py ([#804](https://github.com/googleapis/python-logging/issues/804)) ([b14bb14](https://github.com/googleapis/python-logging/commit/b14bb144fad2dcf067b7e62e402b708f45ebadbe)) +* Updated protobuf JSON formatting to support nested protobufs ([#797](https://github.com/googleapis/python-logging/issues/797)) ([a00c261](https://github.com/googleapis/python-logging/commit/a00c261ee07a5dcaac9f5b966b4bb6729a2bbe65)) +* Use `retry_async` instead of `retry` in async client ([#816](https://github.com/googleapis/python-logging/issues/816)) ([c79f7f5](https://github.com/googleapis/python-logging/commit/c79f7f55dddb170eac29f24b23bfe1dde8bfbda8)) +* Use warning instead of warn in system tests to avoid DeprecationWarning ([#821](https://github.com/googleapis/python-logging/issues/821)) ([c447175](https://github.com/googleapis/python-logging/commit/c4471758e1efee0e3599b08969449b2ce71bd1b4)) + ## [3.8.0](https://github.com/googleapis/python-logging/compare/v3.7.0...v3.8.0) (2023-10-03) diff --git a/google/cloud/logging/gapic_version.py b/google/cloud/logging/gapic_version.py index 4052fbb0c..90b3aae3d 100644 --- a/google/cloud/logging/gapic_version.py +++ b/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.8.0" # {x-release-please-version} +__version__ = "3.9.0" # {x-release-please-version} diff --git a/google/cloud/logging_v2/gapic_version.py b/google/cloud/logging_v2/gapic_version.py index 4052fbb0c..90b3aae3d 100644 --- a/google/cloud/logging_v2/gapic_version.py +++ b/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.8.0" # {x-release-please-version} +__version__ = "3.9.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.logging.v2.json b/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6..f1b714b6b 100644 --- a/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "3.9.0" }, "snippets": [ { From 9638944219fc00a3457fb132fae4fd5ef7d42fe9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 11 Dec 2023 15:56:24 +0000 Subject: [PATCH 19/19] chore: release candidate v3.9.0rc1 --- CHANGELOG.md | 2 +- google/cloud/logging/gapic_version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ae1f74bb..56896a07e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ [1]: https://pypi.org/project/google-cloud-logging/#history -## [3.9.0](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0) (2023-12-08) +## [3.9.0rc1](https://github.com/googleapis/python-logging/compare/v3.8.0...v3.9.0rc1) (2023-12-08) ### Features diff --git a/google/cloud/logging/gapic_version.py b/google/cloud/logging/gapic_version.py index 90b3aae3d..23962727d 100644 --- a/google/cloud/logging/gapic_version.py +++ b/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.9.0" # {x-release-please-version} +__version__ = "3.9.0rc1" # {x-release-please-version}
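
Note on the retry changes in the hunks above: the patch series swaps the synchronous `google.api_core.retry.Retry` policy for `google.api_core.retry_async.AsyncRetry` throughout the async clients, wrapping each RPC with `gapic_v1.method_async.wrap_method`. The following is a minimal, self-contained sketch of that retry pattern, not code taken from the patches: the 0.1 / 60.0 / 1.3 values mirror the backoff settings visible in the diffs, while `flaky_call` is a hypothetical stand-in for an RPC issued through an async transport.

import asyncio

from google.api_core import retry_async

# Async-aware retry policy matching the backoff settings used by the wrapped
# RPCs in the diffs above: exponential backoff starting at 0.1s, capped at
# 60s, growing by a factor of 1.3 per attempt. The default predicate retries
# transient API errors (e.g. 429, 500, 503).
retry_policy = retry_async.AsyncRetry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
)


@retry_policy
async def flaky_call():
    # Hypothetical stand-in for an async RPC; the decorator re-invokes it
    # with backoff whenever the predicate marks the raised error as retryable.
    return "ok"


print(asyncio.run(flaky_call()))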