From bee12d3e1e097a2a9de44fd4ee46817d303846bd Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 13 Dec 2022 20:55:22 +0000 Subject: [PATCH 1/2] chore: Update gapic-generator-python to v1.7.1 PiperOrigin-RevId: 495049888 Source-Link: https://github.com/googleapis/googleapis/commit/bf4359caacb6583399306cd1c13c2e00b2a832f9 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5ab9576eafbd4de436896f01c278757d87a24e27 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWFiOTU3NmVhZmJkNGRlNDM2ODk2ZjAxYzI3ODc1N2Q4N2EyNGUyNyJ9 --- owl-bot-staging/v1beta3/.coveragerc | 12 + owl-bot-staging/v1beta3/.flake8 | 33 + owl-bot-staging/v1beta3/MANIFEST.in | 2 + owl-bot-staging/v1beta3/README.rst | 49 + owl-bot-staging/v1beta3/docs/conf.py | 376 ++ .../flex_templates_service.rst | 6 + .../docs/dataflow_v1beta3/jobs_v1_beta3.rst | 10 + .../dataflow_v1beta3/messages_v1_beta3.rst | 10 + .../dataflow_v1beta3/metrics_v1_beta3.rst | 10 + .../docs/dataflow_v1beta3/services.rst | 11 + .../dataflow_v1beta3/snapshots_v1_beta3.rst | 6 + .../dataflow_v1beta3/templates_service.rst | 6 + .../v1beta3/docs/dataflow_v1beta3/types.rst | 6 + owl-bot-staging/v1beta3/docs/index.rst | 7 + .../v1beta3/google/cloud/dataflow/__init__.py | 257 ++ .../google/cloud/dataflow/gapic_version.py | 16 + .../v1beta3/google/cloud/dataflow/py.typed | 2 + .../google/cloud/dataflow_v1beta3/__init__.py | 258 ++ .../dataflow_v1beta3/gapic_metadata.json | 393 ++ .../cloud/dataflow_v1beta3/gapic_version.py | 16 + .../google/cloud/dataflow_v1beta3/py.typed | 2 + .../dataflow_v1beta3/services/__init__.py | 15 + .../flex_templates_service/__init__.py | 22 + .../flex_templates_service/async_client.py | 279 ++ .../services/flex_templates_service/client.py | 475 +++ .../transports/__init__.py | 38 + .../flex_templates_service/transports/base.py | 151 + .../flex_templates_service/transports/grpc.py | 265 ++ .../transports/grpc_asyncio.py | 264 ++ .../flex_templates_service/transports/rest.py | 294 ++ .../services/jobs_v1_beta3/__init__.py | 22 + .../services/jobs_v1_beta3/async_client.py | 825 ++++ .../services/jobs_v1_beta3/client.py | 1027 +++++ .../services/jobs_v1_beta3/pagers.py | 260 ++ .../jobs_v1_beta3/transports/__init__.py | 38 + .../services/jobs_v1_beta3/transports/base.py | 236 ++ .../services/jobs_v1_beta3/transports/grpc.py | 451 ++ .../jobs_v1_beta3/transports/grpc_asyncio.py | 450 ++ .../services/jobs_v1_beta3/transports/rest.py | 902 ++++ .../services/messages_v1_beta3/__init__.py | 22 + .../messages_v1_beta3/async_client.py | 304 ++ .../services/messages_v1_beta3/client.py | 500 +++ .../services/messages_v1_beta3/pagers.py | 139 + .../messages_v1_beta3/transports/__init__.py | 38 + .../messages_v1_beta3/transports/base.py | 151 + .../messages_v1_beta3/transports/grpc.py | 273 ++ .../transports/grpc_asyncio.py | 272 ++ .../messages_v1_beta3/transports/rest.py | 292 ++ .../services/metrics_v1_beta3/__init__.py | 22 + .../services/metrics_v1_beta3/async_client.py | 496 +++ .../services/metrics_v1_beta3/client.py | 694 ++++ .../services/metrics_v1_beta3/pagers.py | 260 ++ .../metrics_v1_beta3/transports/__init__.py | 38 + .../metrics_v1_beta3/transports/base.py | 179 + .../metrics_v1_beta3/transports/grpc.py | 332 ++ .../transports/grpc_asyncio.py | 331 ++ .../metrics_v1_beta3/transports/rest.py | 508 +++ .../services/snapshots_v1_beta3/__init__.py | 22 + .../snapshots_v1_beta3/async_client.py | 437 ++ .../services/snapshots_v1_beta3/client.py | 635 +++ .../snapshots_v1_beta3/transports/__init__.py | 38 + 
.../snapshots_v1_beta3/transports/base.py | 179 + .../snapshots_v1_beta3/transports/grpc.py | 317 ++ .../transports/grpc_asyncio.py | 316 ++ .../snapshots_v1_beta3/transports/rest.py | 503 +++ .../services/templates_service/__init__.py | 22 + .../templates_service/async_client.py | 446 ++ .../services/templates_service/client.py | 644 +++ .../templates_service/transports/__init__.py | 38 + .../templates_service/transports/base.py | 180 + .../templates_service/transports/grpc.py | 318 ++ .../transports/grpc_asyncio.py | 317 ++ .../templates_service/transports/rest.py | 528 +++ .../cloud/dataflow_v1beta3/types/__init__.py | 242 ++ .../dataflow_v1beta3/types/environment.py | 891 ++++ .../cloud/dataflow_v1beta3/types/jobs.py | 1425 +++++++ .../cloud/dataflow_v1beta3/types/messages.py | 302 ++ .../cloud/dataflow_v1beta3/types/metrics.py | 619 +++ .../cloud/dataflow_v1beta3/types/snapshots.py | 253 ++ .../cloud/dataflow_v1beta3/types/streaming.py | 501 +++ .../cloud/dataflow_v1beta3/types/templates.py | 1063 +++++ owl-bot-staging/v1beta3/mypy.ini | 3 + owl-bot-staging/v1beta3/noxfile.py | 183 + ...ates_service_launch_flex_template_async.py | 51 + ...lates_service_launch_flex_template_sync.py | 51 + ...obs_v1_beta3_aggregated_list_jobs_async.py | 52 + ...jobs_v1_beta3_aggregated_list_jobs_sync.py | 52 + ...d_jobs_v1_beta3_check_active_jobs_async.py | 51 + ...ed_jobs_v1_beta3_check_active_jobs_sync.py | 51 + ...enerated_jobs_v1_beta3_create_job_async.py | 51 + ...generated_jobs_v1_beta3_create_job_sync.py | 51 + ...3_generated_jobs_v1_beta3_get_job_async.py | 51 + ...a3_generated_jobs_v1_beta3_get_job_sync.py | 51 + ...generated_jobs_v1_beta3_list_jobs_async.py | 52 + ..._generated_jobs_v1_beta3_list_jobs_sync.py | 52 + ...erated_jobs_v1_beta3_snapshot_job_async.py | 51 + ...nerated_jobs_v1_beta3_snapshot_job_sync.py | 51 + ...enerated_jobs_v1_beta3_update_job_async.py | 51 + ...generated_jobs_v1_beta3_update_job_sync.py | 51 + ...ssages_v1_beta3_list_job_messages_async.py | 52 + ...essages_v1_beta3_list_job_messages_sync.py | 52 + ...1_beta3_get_job_execution_details_async.py | 52 + ...v1_beta3_get_job_execution_details_sync.py | 52 + ..._metrics_v1_beta3_get_job_metrics_async.py | 51 + ...d_metrics_v1_beta3_get_job_metrics_sync.py | 51 + ...beta3_get_stage_execution_details_async.py | 52 + ..._beta3_get_stage_execution_details_sync.py | 52 + ...napshots_v1_beta3_delete_snapshot_async.py | 51 + ...snapshots_v1_beta3_delete_snapshot_sync.py | 51 + ...d_snapshots_v1_beta3_get_snapshot_async.py | 51 + ...ed_snapshots_v1_beta3_get_snapshot_sync.py | 51 + ...snapshots_v1_beta3_list_snapshots_async.py | 51 + ..._snapshots_v1_beta3_list_snapshots_sync.py | 51 + ..._service_create_job_from_template_async.py | 52 + ...s_service_create_job_from_template_sync.py | 52 + ...ed_templates_service_get_template_async.py | 52 + ...ted_templates_service_get_template_sync.py | 52 + ...templates_service_launch_template_async.py | 52 + ..._templates_service_launch_template_sync.py | 52 + ...ppet_metadata_google.dataflow.v1beta3.json | 2769 +++++++++++++ .../fixup_dataflow_v1beta3_keywords.py | 193 + owl-bot-staging/v1beta3/setup.py | 90 + .../v1beta3/testing/constraints-3.10.txt | 6 + .../v1beta3/testing/constraints-3.11.txt | 6 + .../v1beta3/testing/constraints-3.7.txt | 9 + .../v1beta3/testing/constraints-3.8.txt | 6 + .../v1beta3/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1beta3/tests/__init__.py | 16 + .../v1beta3/tests/unit/__init__.py | 16 + .../v1beta3/tests/unit/gapic/__init__.py | 16 + 
.../unit/gapic/dataflow_v1beta3/__init__.py | 16 + .../test_flex_templates_service.py | 1459 +++++++ .../dataflow_v1beta3/test_jobs_v1_beta3.py | 3644 +++++++++++++++++ .../test_messages_v1_beta3.py | 1713 ++++++++ .../dataflow_v1beta3/test_metrics_v1_beta3.py | 2477 +++++++++++ .../test_snapshots_v1_beta3.py | 2013 +++++++++ .../test_templates_service.py | 2059 ++++++++++ 137 files changed, 41141 insertions(+) create mode 100644 owl-bot-staging/v1beta3/.coveragerc create mode 100644 owl-bot-staging/v1beta3/.flake8 create mode 100644 owl-bot-staging/v1beta3/MANIFEST.in create mode 100644 owl-bot-staging/v1beta3/README.rst create mode 100644 owl-bot-staging/v1beta3/docs/conf.py create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst create mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst create mode 100644 owl-bot-staging/v1beta3/docs/index.rst create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py create mode 100644 
owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py create mode 100644 
owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py create mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py create mode 100644 owl-bot-staging/v1beta3/mypy.ini create mode 100644 owl-bot-staging/v1beta3/noxfile.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py create mode 100644 
owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py create mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json create mode 100644 owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py create mode 100644 
owl-bot-staging/v1beta3/setup.py create mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1beta3/tests/__init__.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py create mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py diff --git a/owl-bot-staging/v1beta3/.coveragerc b/owl-bot-staging/v1beta3/.coveragerc new file mode 100644 index 0000000..bcaea1e --- /dev/null +++ b/owl-bot-staging/v1beta3/.coveragerc @@ -0,0 +1,12 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dataflow/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1beta3/.flake8 b/owl-bot-staging/v1beta3/.flake8 new file mode 100644 index 0000000..29227d4 --- /dev/null +++ b/owl-bot-staging/v1beta3/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1beta3/MANIFEST.in b/owl-bot-staging/v1beta3/MANIFEST.in new file mode 100644 index 0000000..8b3924f --- /dev/null +++ b/owl-bot-staging/v1beta3/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/dataflow *.py +recursive-include google/cloud/dataflow_v1beta3 *.py diff --git a/owl-bot-staging/v1beta3/README.rst b/owl-bot-staging/v1beta3/README.rst new file mode 100644 index 0000000..28a6ee5 --- /dev/null +++ b/owl-bot-staging/v1beta3/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Dataflow API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. 
`Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dataflow API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1beta3/docs/conf.py b/owl-bot-staging/v1beta3/docs/conf.py new file mode 100644 index 0000000..635d9a4 --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dataflow-client documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-dataflow-client" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. 
+# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dataflow-client-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper').
+ # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-dataflow-client.tex", + u"google-cloud-dataflow-client Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-dataflow-client", + u"Google Cloud Dataflow Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-dataflow-client", + u"google-cloud-dataflow-client Documentation", + author, + "google-cloud-dataflow-client", + "GAPIC library for Google Cloud Dataflow API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst new file mode 100644 index 0000000..5fc4461 --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst @@ -0,0 +1,6 @@ +FlexTemplatesService +-------------------------------------- + +.. automodule:: google.cloud.dataflow_v1beta3.services.flex_templates_service + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst new file mode 100644 index 0000000..d2d95a8 --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst @@ -0,0 +1,10 @@ +JobsV1Beta3 +----------------------------- + +.. automodule:: google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 + :members: + :inherited-members: + +.. automodule:: google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst new file mode 100644 index 0000000..0915205 --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst @@ -0,0 +1,10 @@ +MessagesV1Beta3 +--------------------------------- + +.. automodule:: google.cloud.dataflow_v1beta3.services.messages_v1_beta3 + :members: + :inherited-members: + +.. automodule:: google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst new file mode 100644 index 0000000..8ca5594 --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst @@ -0,0 +1,10 @@ +MetricsV1Beta3 +-------------------------------- + +.. automodule:: google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 + :members: + :inherited-members: + +.. 
automodule:: google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst new file mode 100644 index 0000000..d890af6 --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst @@ -0,0 +1,11 @@ +Services for Google Cloud Dataflow v1beta3 API +============================================== +.. toctree:: + :maxdepth: 2 + + flex_templates_service + jobs_v1_beta3 + messages_v1_beta3 + metrics_v1_beta3 + snapshots_v1_beta3 + templates_service diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst new file mode 100644 index 0000000..4619e4d --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst @@ -0,0 +1,6 @@ +SnapshotsV1Beta3 +---------------------------------- + +.. automodule:: google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst new file mode 100644 index 0000000..ad832aa --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst @@ -0,0 +1,6 @@ +TemplatesService +---------------------------------- + +.. automodule:: google.cloud.dataflow_v1beta3.services.templates_service + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst new file mode 100644 index 0000000..a768d4a --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Dataflow v1beta3 API +=========================================== + +.. automodule:: google.cloud.dataflow_v1beta3.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1beta3/docs/index.rst b/owl-bot-staging/v1beta3/docs/index.rst new file mode 100644 index 0000000..59da2fa --- /dev/null +++ b/owl-bot-staging/v1beta3/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dataflow_v1beta3/services + dataflow_v1beta3/types diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py new file mode 100644 index 0000000..bace5a5 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py @@ -0,0 +1,257 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.dataflow import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.dataflow_v1beta3.services.flex_templates_service.client import FlexTemplatesServiceClient +from google.cloud.dataflow_v1beta3.services.flex_templates_service.async_client import FlexTemplatesServiceAsyncClient +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.client import JobsV1Beta3Client +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.async_client import JobsV1Beta3AsyncClient +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.client import MessagesV1Beta3Client +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.async_client import MessagesV1Beta3AsyncClient +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.client import MetricsV1Beta3Client +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.async_client import MetricsV1Beta3AsyncClient +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.client import SnapshotsV1Beta3Client +from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.async_client import SnapshotsV1Beta3AsyncClient +from google.cloud.dataflow_v1beta3.services.templates_service.client import TemplatesServiceClient +from google.cloud.dataflow_v1beta3.services.templates_service.async_client import TemplatesServiceAsyncClient + +from google.cloud.dataflow_v1beta3.types.environment import AutoscalingSettings +from google.cloud.dataflow_v1beta3.types.environment import DebugOptions +from google.cloud.dataflow_v1beta3.types.environment import Disk +from google.cloud.dataflow_v1beta3.types.environment import Environment +from google.cloud.dataflow_v1beta3.types.environment import Package +from google.cloud.dataflow_v1beta3.types.environment import SdkHarnessContainerImage +from google.cloud.dataflow_v1beta3.types.environment import TaskRunnerSettings +from google.cloud.dataflow_v1beta3.types.environment import WorkerPool +from google.cloud.dataflow_v1beta3.types.environment import WorkerSettings +from google.cloud.dataflow_v1beta3.types.environment import AutoscalingAlgorithm +from google.cloud.dataflow_v1beta3.types.environment import DefaultPackageSet +from google.cloud.dataflow_v1beta3.types.environment import FlexResourceSchedulingGoal +from google.cloud.dataflow_v1beta3.types.environment import JobType +from google.cloud.dataflow_v1beta3.types.environment import ShuffleMode +from google.cloud.dataflow_v1beta3.types.environment import TeardownPolicy +from google.cloud.dataflow_v1beta3.types.environment import WorkerIPAddressConfiguration +from google.cloud.dataflow_v1beta3.types.jobs import BigQueryIODetails +from google.cloud.dataflow_v1beta3.types.jobs import BigTableIODetails +from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsRequest +from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsResponse +from google.cloud.dataflow_v1beta3.types.jobs import CreateJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import DatastoreIODetails +from google.cloud.dataflow_v1beta3.types.jobs import DisplayData +from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageState +from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageSummary +from google.cloud.dataflow_v1beta3.types.jobs import FailedLocation +from google.cloud.dataflow_v1beta3.types.jobs import FileIODetails +from google.cloud.dataflow_v1beta3.types.jobs import GetJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import Job +from 
google.cloud.dataflow_v1beta3.types.jobs import JobExecutionInfo +from google.cloud.dataflow_v1beta3.types.jobs import JobExecutionStageInfo +from google.cloud.dataflow_v1beta3.types.jobs import JobMetadata +from google.cloud.dataflow_v1beta3.types.jobs import ListJobsRequest +from google.cloud.dataflow_v1beta3.types.jobs import ListJobsResponse +from google.cloud.dataflow_v1beta3.types.jobs import PipelineDescription +from google.cloud.dataflow_v1beta3.types.jobs import PubSubIODetails +from google.cloud.dataflow_v1beta3.types.jobs import SdkVersion +from google.cloud.dataflow_v1beta3.types.jobs import SnapshotJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import SpannerIODetails +from google.cloud.dataflow_v1beta3.types.jobs import Step +from google.cloud.dataflow_v1beta3.types.jobs import TransformSummary +from google.cloud.dataflow_v1beta3.types.jobs import UpdateJobRequest +from google.cloud.dataflow_v1beta3.types.jobs import JobState +from google.cloud.dataflow_v1beta3.types.jobs import JobView +from google.cloud.dataflow_v1beta3.types.jobs import KindType +from google.cloud.dataflow_v1beta3.types.messages import AutoscalingEvent +from google.cloud.dataflow_v1beta3.types.messages import JobMessage +from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesRequest +from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesResponse +from google.cloud.dataflow_v1beta3.types.messages import StructuredMessage +from google.cloud.dataflow_v1beta3.types.messages import JobMessageImportance +from google.cloud.dataflow_v1beta3.types.metrics import GetJobExecutionDetailsRequest +from google.cloud.dataflow_v1beta3.types.metrics import GetJobMetricsRequest +from google.cloud.dataflow_v1beta3.types.metrics import GetStageExecutionDetailsRequest +from google.cloud.dataflow_v1beta3.types.metrics import JobExecutionDetails +from google.cloud.dataflow_v1beta3.types.metrics import JobMetrics +from google.cloud.dataflow_v1beta3.types.metrics import MetricStructuredName +from google.cloud.dataflow_v1beta3.types.metrics import MetricUpdate +from google.cloud.dataflow_v1beta3.types.metrics import ProgressTimeseries +from google.cloud.dataflow_v1beta3.types.metrics import StageExecutionDetails +from google.cloud.dataflow_v1beta3.types.metrics import StageSummary +from google.cloud.dataflow_v1beta3.types.metrics import WorkerDetails +from google.cloud.dataflow_v1beta3.types.metrics import WorkItemDetails +from google.cloud.dataflow_v1beta3.types.metrics import ExecutionState +from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotRequest +from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotResponse +from google.cloud.dataflow_v1beta3.types.snapshots import GetSnapshotRequest +from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsRequest +from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsResponse +from google.cloud.dataflow_v1beta3.types.snapshots import PubsubSnapshotMetadata +from google.cloud.dataflow_v1beta3.types.snapshots import Snapshot +from google.cloud.dataflow_v1beta3.types.snapshots import SnapshotState +from google.cloud.dataflow_v1beta3.types.streaming import ComputationTopology +from google.cloud.dataflow_v1beta3.types.streaming import CustomSourceLocation +from google.cloud.dataflow_v1beta3.types.streaming import DataDiskAssignment +from google.cloud.dataflow_v1beta3.types.streaming import KeyRangeDataDiskAssignment +from google.cloud.dataflow_v1beta3.types.streaming import 
KeyRangeLocation +from google.cloud.dataflow_v1beta3.types.streaming import MountedDataDisk +from google.cloud.dataflow_v1beta3.types.streaming import PubsubLocation +from google.cloud.dataflow_v1beta3.types.streaming import StateFamilyConfig +from google.cloud.dataflow_v1beta3.types.streaming import StreamingApplianceSnapshotConfig +from google.cloud.dataflow_v1beta3.types.streaming import StreamingComputationRanges +from google.cloud.dataflow_v1beta3.types.streaming import StreamingSideInputLocation +from google.cloud.dataflow_v1beta3.types.streaming import StreamingStageLocation +from google.cloud.dataflow_v1beta3.types.streaming import StreamLocation +from google.cloud.dataflow_v1beta3.types.streaming import TopologyConfig +from google.cloud.dataflow_v1beta3.types.templates import ContainerSpec +from google.cloud.dataflow_v1beta3.types.templates import CreateJobFromTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import DynamicTemplateLaunchParams +from google.cloud.dataflow_v1beta3.types.templates import FlexTemplateRuntimeEnvironment +from google.cloud.dataflow_v1beta3.types.templates import GetTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import GetTemplateResponse +from google.cloud.dataflow_v1beta3.types.templates import InvalidTemplateParameters +from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateParameter +from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateResponse +from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateParameters +from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateRequest +from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateResponse +from google.cloud.dataflow_v1beta3.types.templates import ParameterMetadata +from google.cloud.dataflow_v1beta3.types.templates import RuntimeEnvironment +from google.cloud.dataflow_v1beta3.types.templates import RuntimeMetadata +from google.cloud.dataflow_v1beta3.types.templates import SDKInfo +from google.cloud.dataflow_v1beta3.types.templates import TemplateMetadata +from google.cloud.dataflow_v1beta3.types.templates import ParameterType + +__all__ = ('FlexTemplatesServiceClient', + 'FlexTemplatesServiceAsyncClient', + 'JobsV1Beta3Client', + 'JobsV1Beta3AsyncClient', + 'MessagesV1Beta3Client', + 'MessagesV1Beta3AsyncClient', + 'MetricsV1Beta3Client', + 'MetricsV1Beta3AsyncClient', + 'SnapshotsV1Beta3Client', + 'SnapshotsV1Beta3AsyncClient', + 'TemplatesServiceClient', + 'TemplatesServiceAsyncClient', + 'AutoscalingSettings', + 'DebugOptions', + 'Disk', + 'Environment', + 'Package', + 'SdkHarnessContainerImage', + 'TaskRunnerSettings', + 'WorkerPool', + 'WorkerSettings', + 'AutoscalingAlgorithm', + 'DefaultPackageSet', + 'FlexResourceSchedulingGoal', + 'JobType', + 'ShuffleMode', + 'TeardownPolicy', + 'WorkerIPAddressConfiguration', + 'BigQueryIODetails', + 'BigTableIODetails', + 'CheckActiveJobsRequest', + 'CheckActiveJobsResponse', + 'CreateJobRequest', + 'DatastoreIODetails', + 'DisplayData', + 'ExecutionStageState', + 'ExecutionStageSummary', + 'FailedLocation', + 'FileIODetails', + 'GetJobRequest', + 'Job', + 'JobExecutionInfo', + 'JobExecutionStageInfo', + 'JobMetadata', + 'ListJobsRequest', + 'ListJobsResponse', + 'PipelineDescription', + 'PubSubIODetails', + 'SdkVersion', + 'SnapshotJobRequest', + 'SpannerIODetails', + 'Step', + 'TransformSummary', + 'UpdateJobRequest', + 'JobState', + 
'JobView', + 'KindType', + 'AutoscalingEvent', + 'JobMessage', + 'ListJobMessagesRequest', + 'ListJobMessagesResponse', + 'StructuredMessage', + 'JobMessageImportance', + 'GetJobExecutionDetailsRequest', + 'GetJobMetricsRequest', + 'GetStageExecutionDetailsRequest', + 'JobExecutionDetails', + 'JobMetrics', + 'MetricStructuredName', + 'MetricUpdate', + 'ProgressTimeseries', + 'StageExecutionDetails', + 'StageSummary', + 'WorkerDetails', + 'WorkItemDetails', + 'ExecutionState', + 'DeleteSnapshotRequest', + 'DeleteSnapshotResponse', + 'GetSnapshotRequest', + 'ListSnapshotsRequest', + 'ListSnapshotsResponse', + 'PubsubSnapshotMetadata', + 'Snapshot', + 'SnapshotState', + 'ComputationTopology', + 'CustomSourceLocation', + 'DataDiskAssignment', + 'KeyRangeDataDiskAssignment', + 'KeyRangeLocation', + 'MountedDataDisk', + 'PubsubLocation', + 'StateFamilyConfig', + 'StreamingApplianceSnapshotConfig', + 'StreamingComputationRanges', + 'StreamingSideInputLocation', + 'StreamingStageLocation', + 'StreamLocation', + 'TopologyConfig', + 'ContainerSpec', + 'CreateJobFromTemplateRequest', + 'DynamicTemplateLaunchParams', + 'FlexTemplateRuntimeEnvironment', + 'GetTemplateRequest', + 'GetTemplateResponse', + 'InvalidTemplateParameters', + 'LaunchFlexTemplateParameter', + 'LaunchFlexTemplateRequest', + 'LaunchFlexTemplateResponse', + 'LaunchTemplateParameters', + 'LaunchTemplateRequest', + 'LaunchTemplateResponse', + 'ParameterMetadata', + 'RuntimeEnvironment', + 'RuntimeMetadata', + 'SDKInfo', + 'TemplateMetadata', + 'ParameterType', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py b/owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py new file mode 100644 index 0000000..405b1ce --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed b/owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed new file mode 100644 index 0000000..db7ad15 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataflow-client package uses inline types. diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py new file mode 100644 index 0000000..d2ab386 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py @@ -0,0 +1,258 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.dataflow import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.flex_templates_service import FlexTemplatesServiceClient +from .services.flex_templates_service import FlexTemplatesServiceAsyncClient +from .services.jobs_v1_beta3 import JobsV1Beta3Client +from .services.jobs_v1_beta3 import JobsV1Beta3AsyncClient +from .services.messages_v1_beta3 import MessagesV1Beta3Client +from .services.messages_v1_beta3 import MessagesV1Beta3AsyncClient +from .services.metrics_v1_beta3 import MetricsV1Beta3Client +from .services.metrics_v1_beta3 import MetricsV1Beta3AsyncClient +from .services.snapshots_v1_beta3 import SnapshotsV1Beta3Client +from .services.snapshots_v1_beta3 import SnapshotsV1Beta3AsyncClient +from .services.templates_service import TemplatesServiceClient +from .services.templates_service import TemplatesServiceAsyncClient + +from .types.environment import AutoscalingSettings +from .types.environment import DebugOptions +from .types.environment import Disk +from .types.environment import Environment +from .types.environment import Package +from .types.environment import SdkHarnessContainerImage +from .types.environment import TaskRunnerSettings +from .types.environment import WorkerPool +from .types.environment import WorkerSettings +from .types.environment import AutoscalingAlgorithm +from .types.environment import DefaultPackageSet +from .types.environment import FlexResourceSchedulingGoal +from .types.environment import JobType +from .types.environment import ShuffleMode +from .types.environment import TeardownPolicy +from .types.environment import WorkerIPAddressConfiguration +from .types.jobs import BigQueryIODetails +from .types.jobs import BigTableIODetails +from .types.jobs import CheckActiveJobsRequest +from .types.jobs import CheckActiveJobsResponse +from .types.jobs import CreateJobRequest +from .types.jobs import DatastoreIODetails +from .types.jobs import DisplayData +from .types.jobs import ExecutionStageState +from .types.jobs import ExecutionStageSummary +from .types.jobs import FailedLocation +from .types.jobs import FileIODetails +from .types.jobs import GetJobRequest +from .types.jobs import Job +from .types.jobs import JobExecutionInfo +from .types.jobs import JobExecutionStageInfo +from .types.jobs import JobMetadata +from .types.jobs import ListJobsRequest +from .types.jobs import ListJobsResponse +from .types.jobs import PipelineDescription +from .types.jobs import PubSubIODetails +from .types.jobs import SdkVersion +from .types.jobs import SnapshotJobRequest +from .types.jobs import SpannerIODetails +from .types.jobs import Step +from .types.jobs import TransformSummary +from .types.jobs import UpdateJobRequest +from .types.jobs import JobState +from .types.jobs import JobView +from .types.jobs import KindType +from .types.messages import AutoscalingEvent +from .types.messages import JobMessage +from .types.messages import ListJobMessagesRequest +from .types.messages import ListJobMessagesResponse +from .types.messages import StructuredMessage +from .types.messages import 
JobMessageImportance +from .types.metrics import GetJobExecutionDetailsRequest +from .types.metrics import GetJobMetricsRequest +from .types.metrics import GetStageExecutionDetailsRequest +from .types.metrics import JobExecutionDetails +from .types.metrics import JobMetrics +from .types.metrics import MetricStructuredName +from .types.metrics import MetricUpdate +from .types.metrics import ProgressTimeseries +from .types.metrics import StageExecutionDetails +from .types.metrics import StageSummary +from .types.metrics import WorkerDetails +from .types.metrics import WorkItemDetails +from .types.metrics import ExecutionState +from .types.snapshots import DeleteSnapshotRequest +from .types.snapshots import DeleteSnapshotResponse +from .types.snapshots import GetSnapshotRequest +from .types.snapshots import ListSnapshotsRequest +from .types.snapshots import ListSnapshotsResponse +from .types.snapshots import PubsubSnapshotMetadata +from .types.snapshots import Snapshot +from .types.snapshots import SnapshotState +from .types.streaming import ComputationTopology +from .types.streaming import CustomSourceLocation +from .types.streaming import DataDiskAssignment +from .types.streaming import KeyRangeDataDiskAssignment +from .types.streaming import KeyRangeLocation +from .types.streaming import MountedDataDisk +from .types.streaming import PubsubLocation +from .types.streaming import StateFamilyConfig +from .types.streaming import StreamingApplianceSnapshotConfig +from .types.streaming import StreamingComputationRanges +from .types.streaming import StreamingSideInputLocation +from .types.streaming import StreamingStageLocation +from .types.streaming import StreamLocation +from .types.streaming import TopologyConfig +from .types.templates import ContainerSpec +from .types.templates import CreateJobFromTemplateRequest +from .types.templates import DynamicTemplateLaunchParams +from .types.templates import FlexTemplateRuntimeEnvironment +from .types.templates import GetTemplateRequest +from .types.templates import GetTemplateResponse +from .types.templates import InvalidTemplateParameters +from .types.templates import LaunchFlexTemplateParameter +from .types.templates import LaunchFlexTemplateRequest +from .types.templates import LaunchFlexTemplateResponse +from .types.templates import LaunchTemplateParameters +from .types.templates import LaunchTemplateRequest +from .types.templates import LaunchTemplateResponse +from .types.templates import ParameterMetadata +from .types.templates import RuntimeEnvironment +from .types.templates import RuntimeMetadata +from .types.templates import SDKInfo +from .types.templates import TemplateMetadata +from .types.templates import ParameterType + +__all__ = ( + 'FlexTemplatesServiceAsyncClient', + 'JobsV1Beta3AsyncClient', + 'MessagesV1Beta3AsyncClient', + 'MetricsV1Beta3AsyncClient', + 'SnapshotsV1Beta3AsyncClient', + 'TemplatesServiceAsyncClient', +'AutoscalingAlgorithm', +'AutoscalingEvent', +'AutoscalingSettings', +'BigQueryIODetails', +'BigTableIODetails', +'CheckActiveJobsRequest', +'CheckActiveJobsResponse', +'ComputationTopology', +'ContainerSpec', +'CreateJobFromTemplateRequest', +'CreateJobRequest', +'CustomSourceLocation', +'DataDiskAssignment', +'DatastoreIODetails', +'DebugOptions', +'DefaultPackageSet', +'DeleteSnapshotRequest', +'DeleteSnapshotResponse', +'Disk', +'DisplayData', +'DynamicTemplateLaunchParams', +'Environment', +'ExecutionStageState', +'ExecutionStageSummary', +'ExecutionState', +'FailedLocation', +'FileIODetails', 
+'FlexResourceSchedulingGoal', +'FlexTemplateRuntimeEnvironment', +'FlexTemplatesServiceClient', +'GetJobExecutionDetailsRequest', +'GetJobMetricsRequest', +'GetJobRequest', +'GetSnapshotRequest', +'GetStageExecutionDetailsRequest', +'GetTemplateRequest', +'GetTemplateResponse', +'InvalidTemplateParameters', +'Job', +'JobExecutionDetails', +'JobExecutionInfo', +'JobExecutionStageInfo', +'JobMessage', +'JobMessageImportance', +'JobMetadata', +'JobMetrics', +'JobState', +'JobType', +'JobView', +'JobsV1Beta3Client', +'KeyRangeDataDiskAssignment', +'KeyRangeLocation', +'KindType', +'LaunchFlexTemplateParameter', +'LaunchFlexTemplateRequest', +'LaunchFlexTemplateResponse', +'LaunchTemplateParameters', +'LaunchTemplateRequest', +'LaunchTemplateResponse', +'ListJobMessagesRequest', +'ListJobMessagesResponse', +'ListJobsRequest', +'ListJobsResponse', +'ListSnapshotsRequest', +'ListSnapshotsResponse', +'MessagesV1Beta3Client', +'MetricStructuredName', +'MetricUpdate', +'MetricsV1Beta3Client', +'MountedDataDisk', +'Package', +'ParameterMetadata', +'ParameterType', +'PipelineDescription', +'ProgressTimeseries', +'PubSubIODetails', +'PubsubLocation', +'PubsubSnapshotMetadata', +'RuntimeEnvironment', +'RuntimeMetadata', +'SDKInfo', +'SdkHarnessContainerImage', +'SdkVersion', +'ShuffleMode', +'Snapshot', +'SnapshotJobRequest', +'SnapshotState', +'SnapshotsV1Beta3Client', +'SpannerIODetails', +'StageExecutionDetails', +'StageSummary', +'StateFamilyConfig', +'Step', +'StreamLocation', +'StreamingApplianceSnapshotConfig', +'StreamingComputationRanges', +'StreamingSideInputLocation', +'StreamingStageLocation', +'StructuredMessage', +'TaskRunnerSettings', +'TeardownPolicy', +'TemplateMetadata', +'TemplatesServiceClient', +'TopologyConfig', +'TransformSummary', +'UpdateJobRequest', +'WorkItemDetails', +'WorkerDetails', +'WorkerIPAddressConfiguration', +'WorkerPool', +'WorkerSettings', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json new file mode 100644 index 0000000..ab8a5b6 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json @@ -0,0 +1,393 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataflow_v1beta3", + "protoPackage": "google.dataflow.v1beta3", + "schema": "1.0", + "services": { + "FlexTemplatesService": { + "clients": { + "grpc": { + "libraryClient": "FlexTemplatesServiceClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "FlexTemplatesServiceAsyncClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } + }, + "rest": { + "libraryClient": "FlexTemplatesServiceClient", + "rpcs": { + "LaunchFlexTemplate": { + "methods": [ + "launch_flex_template" + ] + } + } + } + } + }, + "JobsV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "JobsV1Beta3Client", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" + ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "grpc-async": { + 
"libraryClient": "JobsV1Beta3AsyncClient", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" + ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + }, + "rest": { + "libraryClient": "JobsV1Beta3Client", + "rpcs": { + "AggregatedListJobs": { + "methods": [ + "aggregated_list_jobs" + ] + }, + "CheckActiveJobs": { + "methods": [ + "check_active_jobs" + ] + }, + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + }, + "SnapshotJob": { + "methods": [ + "snapshot_job" + ] + }, + "UpdateJob": { + "methods": [ + "update_job" + ] + } + } + } + } + }, + "MessagesV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "MessagesV1Beta3Client", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MessagesV1Beta3AsyncClient", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } + }, + "rest": { + "libraryClient": "MessagesV1Beta3Client", + "rpcs": { + "ListJobMessages": { + "methods": [ + "list_job_messages" + ] + } + } + } + } + }, + "MetricsV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "MetricsV1Beta3Client", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricsV1Beta3AsyncClient", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } + }, + "rest": { + "libraryClient": "MetricsV1Beta3Client", + "rpcs": { + "GetJobExecutionDetails": { + "methods": [ + "get_job_execution_details" + ] + }, + "GetJobMetrics": { + "methods": [ + "get_job_metrics" + ] + }, + "GetStageExecutionDetails": { + "methods": [ + "get_stage_execution_details" + ] + } + } + } + } + }, + "SnapshotsV1Beta3": { + "clients": { + "grpc": { + "libraryClient": "SnapshotsV1Beta3Client", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SnapshotsV1Beta3AsyncClient", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } + }, + "rest": { + "libraryClient": "SnapshotsV1Beta3Client", + "rpcs": { + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + } + } + } + } + }, + "TemplatesService": { + "clients": { + "grpc": { + "libraryClient": "TemplatesServiceClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { + "methods": [ + 
"get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TemplatesServiceAsyncClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } + }, + "rest": { + "libraryClient": "TemplatesServiceClient", + "rpcs": { + "CreateJobFromTemplate": { + "methods": [ + "create_job_from_template" + ] + }, + "GetTemplate": { + "methods": [ + "get_template" + ] + }, + "LaunchTemplate": { + "methods": [ + "launch_template" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py new file mode 100644 index 0000000..405b1ce --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed new file mode 100644 index 0000000..db7ad15 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dataflow-client package uses inline types. diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py new file mode 100644 index 0000000..e8e1c38 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py new file mode 100644 index 0000000..107271e --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import FlexTemplatesServiceClient +from .async_client import FlexTemplatesServiceAsyncClient + +__all__ = ( + 'FlexTemplatesServiceClient', + 'FlexTemplatesServiceAsyncClient', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py new file mode 100644 index 0000000..f92bbb5 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from .transports.base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .client import FlexTemplatesServiceClient + + +class FlexTemplatesServiceAsyncClient: + """Provides a service for Flex templates. This feature is not + ready yet. 
+ """ + + _client: FlexTemplatesServiceClient + + DEFAULT_ENDPOINT = FlexTemplatesServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = FlexTemplatesServiceClient.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(FlexTemplatesServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(FlexTemplatesServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(FlexTemplatesServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(FlexTemplatesServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(FlexTemplatesServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(FlexTemplatesServiceClient.parse_common_organization_path) + common_project_path = staticmethod(FlexTemplatesServiceClient.common_project_path) + parse_common_project_path = staticmethod(FlexTemplatesServiceClient.parse_common_project_path) + common_location_path = staticmethod(FlexTemplatesServiceClient.common_location_path) + parse_common_location_path = staticmethod(FlexTemplatesServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceAsyncClient: The constructed client. + """ + return FlexTemplatesServiceClient.from_service_account_info.__func__(FlexTemplatesServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceAsyncClient: The constructed client. + """ + return FlexTemplatesServiceClient.from_service_account_file.__func__(FlexTemplatesServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return FlexTemplatesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> FlexTemplatesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FlexTemplatesServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(FlexTemplatesServiceClient).get_transport_class, type(FlexTemplatesServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, FlexTemplatesServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the flex templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.FlexTemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = FlexTemplatesServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def launch_flex_template(self, + request: Optional[Union[templates.LaunchFlexTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchFlexTemplateResponse: + r"""Launch a job with a FlexTemplate. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
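+            # - For example (hypothetical values, not a complete request), a
+            #   launch might set request.launch_parameter.job_name and
+            #   request.launch_parameter.container_spec_gcs_path before
+            #   calling the RPC.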
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_launch_flex_template(): + # Create a client + client = dataflow_v1beta3.FlexTemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchFlexTemplateRequest( + ) + + # Make the request + response = await client.launch_flex_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest, dict]]): + The request object. A request to launch a Cloud Dataflow + job from a FlexTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse: + Response to the request to launch a + job from Flex Template. + + """ + # Create or coerce a protobuf request object. + request = templates.LaunchFlexTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.launch_flex_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FlexTemplatesServiceAsyncClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py new file mode 100644 index 0000000..2c3d168 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from .transports.base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import FlexTemplatesServiceGrpcTransport +from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .transports.rest import FlexTemplatesServiceRestTransport + + +class FlexTemplatesServiceClientMeta(type): + """Metaclass for the FlexTemplatesService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[FlexTemplatesServiceTransport]] + _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport + _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = FlexTemplatesServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[FlexTemplatesServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class FlexTemplatesServiceClient(metaclass=FlexTemplatesServiceClientMeta): + """Provides a service for Flex templates. This feature is not + ready yet. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + FlexTemplatesServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> FlexTemplatesServiceTransport: + """Returns the transport used by the client instance. + + Returns: + FlexTemplatesServiceTransport: The transport used by the client + instance. 
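+
+        For example, a minimal (hypothetical) check of which transport the
+        client selected:
+
+        .. code-block:: python
+
+            client = FlexTemplatesServiceClient()
+            print(client.transport.kind)  # "grpc" unless configured otherwise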
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, FlexTemplatesServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the flex templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, FlexTemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, FlexTemplatesServiceTransport): + # transport is a FlexTemplatesServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def launch_flex_template(self, + request: Optional[Union[templates.LaunchFlexTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchFlexTemplateResponse: + r"""Launch a job with a FlexTemplate. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_launch_flex_template(): + # Create a client + client = dataflow_v1beta3.FlexTemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchFlexTemplateRequest( + ) + + # Make the request + response = client.launch_flex_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest, dict]): + The request object. A request to launch a Cloud Dataflow + job from a FlexTemplate. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse: + Response to the request to launch a + job from Flex Template. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a templates.LaunchFlexTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, templates.LaunchFlexTemplateRequest): + request = templates.LaunchFlexTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.launch_flex_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "FlexTemplatesServiceClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py new file mode 100644 index 0000000..3688dba --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import FlexTemplatesServiceTransport +from .grpc import FlexTemplatesServiceGrpcTransport +from .grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport +from .rest import FlexTemplatesServiceRestTransport +from .rest import FlexTemplatesServiceRestInterceptor + + +# Compile a registry of transports. 
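+# The registry maps the transport names accepted by the client constructor's
+# ``transport=`` argument ("grpc", "grpc_asyncio", "rest") to transport
+# classes. A hypothetical direct lookup, for illustration only:
+#
+#     transport_cls = _transport_registry["rest"]
+#     # transport_cls is FlexTemplatesServiceRestTransport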
+_transport_registry = OrderedDict() # type: Dict[str, Type[FlexTemplatesServiceTransport]] +_transport_registry['grpc'] = FlexTemplatesServiceGrpcTransport +_transport_registry['grpc_asyncio'] = FlexTemplatesServiceGrpcAsyncIOTransport +_transport_registry['rest'] = FlexTemplatesServiceRestTransport + +__all__ = ( + 'FlexTemplatesServiceTransport', + 'FlexTemplatesServiceGrpcTransport', + 'FlexTemplatesServiceGrpcAsyncIOTransport', + 'FlexTemplatesServiceRestTransport', + 'FlexTemplatesServiceRestInterceptor', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py new file mode 100644 index 0000000..65c0480 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import templates + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class FlexTemplatesServiceTransport(abc.ABC): + """Abstract transport class for FlexTemplatesService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', + ) + + DEFAULT_HOST: str = 'dataflow.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.launch_flex_template: gapic_v1.method.wrap_method( + self.launch_flex_template, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def launch_flex_template(self) -> Callable[ + [templates.LaunchFlexTemplateRequest], + Union[ + templates.LaunchFlexTemplateResponse, + Awaitable[templates.LaunchFlexTemplateResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'FlexTemplatesServiceTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py new file mode 100644 index 0000000..d953d13 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py @@ -0,0 +1,265 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataflow_v1beta3.types import templates +from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO + + +class FlexTemplatesServiceGrpcTransport(FlexTemplatesServiceTransport): + """gRPC backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataflow.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def launch_flex_template(self) -> Callable[
+            [templates.LaunchFlexTemplateRequest],
+            templates.LaunchFlexTemplateResponse]:
+        r"""Return a callable for the launch flex template method over gRPC.
+
+        Launch a job with a FlexTemplate.
+
+        Returns:
+            Callable[[~.LaunchFlexTemplateRequest],
+                    ~.LaunchFlexTemplateResponse]:
+            A function that, when called, will call the underlying RPC
+            on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
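+        # Note: the stub is created on first access and cached in
+        # ``self._stubs``; later accesses of this property reuse the cached
+        # callable instead of registering the RPC path on the channel again.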
+ if 'launch_flex_template' not in self._stubs: + self._stubs['launch_flex_template'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.FlexTemplatesService/LaunchFlexTemplate', + request_serializer=templates.LaunchFlexTemplateRequest.serialize, + response_deserializer=templates.LaunchFlexTemplateResponse.deserialize, + ) + return self._stubs['launch_flex_template'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'FlexTemplatesServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..395fc0a --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py @@ -0,0 +1,264 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import templates +from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import FlexTemplatesServiceGrpcTransport + + +class FlexTemplatesServiceGrpcAsyncIOTransport(FlexTemplatesServiceTransport): + """gRPC AsyncIO backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataflow.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[aio.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def launch_flex_template(self) -> Callable[ + [templates.LaunchFlexTemplateRequest], + Awaitable[templates.LaunchFlexTemplateResponse]]: + r"""Return a callable for the launch flex template method over gRPC. + + Launch a job with a FlexTemplate. + + Returns: + Callable[[~.LaunchFlexTemplateRequest], + Awaitable[~.LaunchFlexTemplateResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
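+        # Unlike the sync transport, the callable returned here produces an
+        # ``Awaitable``; the invocation must be awaited by the caller.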
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'launch_flex_template' not in self._stubs: + self._stubs['launch_flex_template'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.FlexTemplatesService/LaunchFlexTemplate', + request_serializer=templates.LaunchFlexTemplateRequest.serialize, + response_deserializer=templates.LaunchFlexTemplateResponse.deserialize, + ) + return self._stubs['launch_flex_template'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'FlexTemplatesServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py new file mode 100644 index 0000000..e3cd96a --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import templates + +from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class FlexTemplatesServiceRestInterceptor: + """Interceptor for FlexTemplatesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the FlexTemplatesServiceRestTransport. + + .. 
code-block:: python + class MyCustomFlexTemplatesServiceInterceptor(FlexTemplatesServiceRestInterceptor): + def pre_launch_flex_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_launch_flex_template(self, response): + logging.log(f"Received response: {response}") + return response + + transport = FlexTemplatesServiceRestTransport(interceptor=MyCustomFlexTemplatesServiceInterceptor()) + client = FlexTemplatesServiceClient(transport=transport) + + + """ + def pre_launch_flex_template(self, request: templates.LaunchFlexTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.LaunchFlexTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for launch_flex_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the FlexTemplatesService server. + """ + return request, metadata + + def post_launch_flex_template(self, response: templates.LaunchFlexTemplateResponse) -> templates.LaunchFlexTemplateResponse: + """Post-rpc interceptor for launch_flex_template + + Override in a subclass to manipulate the response + after it is returned by the FlexTemplatesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class FlexTemplatesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: FlexTemplatesServiceRestInterceptor + + +class FlexTemplatesServiceRestTransport(FlexTemplatesServiceTransport): + """REST backend transport for FlexTemplatesService. + + Provides a service for Flex templates. This feature is not + ready yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[FlexTemplatesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or FlexTemplatesServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _LaunchFlexTemplate(FlexTemplatesServiceRestStub):
+        def __hash__(self):
+            return hash("LaunchFlexTemplate")
+
+        def __call__(self,
+                request: templates.LaunchFlexTemplateRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> templates.LaunchFlexTemplateResponse:
+            r"""Call the launch flex template method over HTTP.
+
+            Args:
+                request (~.templates.LaunchFlexTemplateRequest):
+                    The request object. A request to launch a Cloud Dataflow
+                    job from a FlexTemplate.
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.templates.LaunchFlexTemplateResponse:
+                    Response to the request to launch a
+                job from Flex Template.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/flexTemplates:launch', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_launch_flex_template(request, metadata) + pb_request = templates.LaunchFlexTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.LaunchFlexTemplateResponse() + pb_resp = templates.LaunchFlexTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_launch_flex_template(resp) + return resp + + @property + def launch_flex_template(self) -> Callable[ + [templates.LaunchFlexTemplateRequest], + templates.LaunchFlexTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LaunchFlexTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'FlexTemplatesServiceRestTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py new file mode 100644 index 0000000..3dac587 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import JobsV1Beta3Client +from .async_client import JobsV1Beta3AsyncClient + +__all__ = ( + 'JobsV1Beta3Client', + 'JobsV1Beta3AsyncClient', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py new file mode 100644 index 0000000..ff51a05 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py @@ -0,0 +1,825 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .client import JobsV1Beta3Client + + +class JobsV1Beta3AsyncClient: + """Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. 
+ """ + + _client: JobsV1Beta3Client + + DEFAULT_ENDPOINT = JobsV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = JobsV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(JobsV1Beta3Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(JobsV1Beta3Client.parse_common_billing_account_path) + common_folder_path = staticmethod(JobsV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod(JobsV1Beta3Client.parse_common_folder_path) + common_organization_path = staticmethod(JobsV1Beta3Client.common_organization_path) + parse_common_organization_path = staticmethod(JobsV1Beta3Client.parse_common_organization_path) + common_project_path = staticmethod(JobsV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod(JobsV1Beta3Client.parse_common_project_path) + common_location_path = staticmethod(JobsV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod(JobsV1Beta3Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobsV1Beta3AsyncClient: The constructed client. + """ + return JobsV1Beta3Client.from_service_account_info.__func__(JobsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobsV1Beta3AsyncClient: The constructed client. + """ + return JobsV1Beta3Client.from_service_account_file.__func__(JobsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return JobsV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> JobsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + JobsV1Beta3Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(JobsV1Beta3Client).get_transport_class, type(JobsV1Beta3Client)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, JobsV1Beta3Transport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the jobs v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.JobsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = JobsV1Beta3Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_job(self, + request: Optional[Union[jobs.CreateJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job. + + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_create_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobRequest( + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.CreateJobRequest, dict]]): + The request object. Request to create a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + request = jobs.CreateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: Optional[Union[jobs.GetJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_get_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobRequest( + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetJobRequest, dict]]): + The request object. Request to get the state of a Cloud + Dataflow job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + request = jobs.GetJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job(self, + request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_update_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.UpdateJobRequest( + ) + + # Make the request + response = await client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.UpdateJobRequest, dict]]): + The request object. Request to update a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + request = jobs.UpdateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
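+        # For example (hypothetical values), project_id="my-project",
+        # location="us-central1" and job_id="1234" are folded into a single
+        # "x-goog-request-params" metadata entry used for request routing.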
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project_id", request.project_id),
+                ("location", request.location),
+                ("job_id", request.job_id),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_jobs(self,
+            request: Optional[Union[jobs.ListJobsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListJobsAsyncPager:
+        r"""List the jobs of a project.
+
+        To list the jobs of a project in a region, we recommend using
+        ``projects.locations.jobs.list`` with a [regional endpoint]
+        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+        To list all jobs across all regions, use
+        ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
+        not recommended, as you can only get the list of jobs that are
+        running in ``us-central1``.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            async def sample_list_jobs():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.ListJobsRequest(
+                )
+
+                # Make the request
+                page_result = client.list_jobs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]]):
+                The request object. Request to list Cloud Dataflow jobs.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager:
+                Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                request's response body is empty {}.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = jobs.ListJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.list_jobs,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project_id", request.project_id),
+                ("location", request.location),
+            )),
+        )
+
+        # Send the request.
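+        # ``retry`` and ``timeout`` are gapic_v1.method.DEFAULT sentinels
+        # unless the caller overrides them, so the defaults baked into the
+        # wrapped method above are applied here.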
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListJobsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def aggregated_list_jobs(self,
+            request: Optional[Union[jobs.ListJobsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.AggregatedListJobsAsyncPager:
+        r"""List the jobs of a project across all regions.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            async def sample_aggregated_list_jobs():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.ListJobsRequest(
+                )
+
+                # Make the request
+                page_result = client.aggregated_list_jobs(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]]):
+                The request object. Request to list Cloud Dataflow jobs.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager:
+                Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                request's response body is empty {}.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = jobs.ListJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.aggregated_list_jobs,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project_id", request.project_id),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.AggregatedListJobsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
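+        # Note: the pager fetches further pages lazily; ``async for`` over it
+        # issues additional ListJobsRequest calls only as needed.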
+        return response
+
+    async def check_active_jobs(self,
+            request: Optional[Union[jobs.CheckActiveJobsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> jobs.CheckActiveJobsResponse:
+        r"""Check for existence of active jobs in the given
+        project across all regions.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            async def sample_check_active_jobs():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.CheckActiveJobsRequest(
+                )
+
+                # Make the request
+                response = await client.check_active_jobs(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest, dict]]):
+                The request object. Request to check whether active jobs
+                exist for a project.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse:
+                Response for CheckActiveJobsRequest.
+        """
+        # Create or coerce a protobuf request object.
+        request = jobs.CheckActiveJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.check_active_jobs,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def snapshot_job(self,
+            request: Optional[Union[jobs.SnapshotJobRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> snapshots.Snapshot:
+        r"""Snapshot the state of a streaming job.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            async def sample_snapshot_job():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.SnapshotJobRequest(
+                )
+
+                # Make the request
+                response = await client.snapshot_job(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.cloud.dataflow_v1beta3.types.SnapshotJobRequest, dict]]):
+                The request object. 
Request to create a snapshot of a + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object. + request = jobs.SnapshotJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.snapshot_job, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "JobsV1Beta3AsyncClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py new file mode 100644 index 0000000..37d535d --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py @@ -0,0 +1,1027 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast
+
+from google.cloud.dataflow_v1beta3 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object]  # type: ignore
+
+from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers
+from google.cloud.dataflow_v1beta3.types import environment
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import snapshots
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO
+from .transports.grpc import JobsV1Beta3GrpcTransport
+from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport
+from .transports.rest import JobsV1Beta3RestTransport
+
+
+class JobsV1Beta3ClientMeta(type):
+    """Metaclass for the JobsV1Beta3 client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[JobsV1Beta3Transport]]
+    _transport_registry["grpc"] = JobsV1Beta3GrpcTransport
+    _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport
+    _transport_registry["rest"] = JobsV1Beta3RestTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[JobsV1Beta3Transport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class JobsV1Beta3Client(metaclass=JobsV1Beta3ClientMeta):
+    """Provides a method to create and modify Google Cloud Dataflow
+    jobs. A Job is a multi-stage computation graph run by the Cloud
+    Dataflow service.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobsV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + JobsV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> JobsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + JobsV1Beta3Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, JobsV1Beta3Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the jobs v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, JobsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
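+
+        Example (a minimal, illustrative sketch; the endpoint shown is this
+        client's derived default mTLS endpoint, not a required value)::
+
+            client = JobsV1Beta3Client(
+                client_options={"api_endpoint": "dataflow.mtls.googleapis.com"},
+            )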
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, JobsV1Beta3Transport): + # transport is a JobsV1Beta3Transport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_job(self, + request: Optional[Union[jobs.CreateJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job. + + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_create_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobRequest( + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.CreateJobRequest, dict]): + The request object. Request to create a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.CreateJobRequest): + request = jobs.CreateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job(self, + request: Optional[Union[jobs.GetJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_get_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobRequest( + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.GetJobRequest, dict]): + The request object. Request to get the state of a Cloud + Dataflow job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
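+        # For example (illustrative values, not defaults), both of these forms
+        # are accepted and yield the same proto:
+        #   client.get_job(request={"project_id": "my-project", "job_id": "123"})
+        #   client.get_job(request=jobs.GetJobRequest(project_id="my-project", job_id="123"))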
+ if not isinstance(request, jobs.GetJobRequest): + request = jobs.GetJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job(self, + request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_update_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.UpdateJobRequest( + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.UpdateJobRequest, dict]): + The request object. Request to update a Cloud Dataflow + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.UpdateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.UpdateJobRequest): + request = jobs.UpdateJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. 
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_jobs(self,
+            request: Optional[Union[jobs.ListJobsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListJobsPager:
+        r"""List the jobs of a project.
+
+        To list the jobs of a project in a region, we recommend using
+        ``projects.locations.jobs.list`` with a [regional endpoint]
+        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+        To list all jobs across all regions, use
+        ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
+        not recommended, as you can only get the list of jobs that are
+        running in ``us-central1``.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            def sample_list_jobs():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3Client()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.ListJobsRequest(
+                )
+
+                # Make the request
+                page_result = client.list_jobs(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]):
+                The request object. Request to list Cloud Dataflow jobs.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager:
+                Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                request's response body is empty {}.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.ListJobsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, jobs.ListJobsRequest):
+            request = jobs.ListJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_jobs]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project_id", request.project_id),
+                ("location", request.location),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
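+        # For example (illustrative), iterating the pager yields jobs across
+        # pages, fetching additional pages lazily:
+        #   for job in client.list_jobs(request={"project_id": "my-project"}):
+        #       print(job.id)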
+        response = pagers.ListJobsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def aggregated_list_jobs(self,
+            request: Optional[Union[jobs.ListJobsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.AggregatedListJobsPager:
+        r"""List the jobs of a project across all regions.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            def sample_aggregated_list_jobs():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3Client()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.ListJobsRequest(
+                )
+
+                # Make the request
+                page_result = client.aggregated_list_jobs(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]):
+                The request object. Request to list Cloud Dataflow jobs.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager:
+                Response to a request to list Cloud
+                Dataflow jobs in a project. This might
+                be a partial response, depending on the
+                page size in the ListJobsRequest.
+                However, if the project does not have
+                any jobs, an instance of
+                ListJobsResponse is not returned and the
+                request's response body is empty {}.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.ListJobsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, jobs.ListJobsRequest):
+            request = jobs.ListJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.aggregated_list_jobs]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("project_id", request.project_id),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.AggregatedListJobsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def check_active_jobs(self,
+            request: Optional[Union[jobs.CheckActiveJobsRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> jobs.CheckActiveJobsResponse:
+        r"""Check for existence of active jobs in the given
+        project across all regions.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import dataflow_v1beta3
+
+            def sample_check_active_jobs():
+                # Create a client
+                client = dataflow_v1beta3.JobsV1Beta3Client()
+
+                # Initialize request argument(s)
+                request = dataflow_v1beta3.CheckActiveJobsRequest(
+                )
+
+                # Make the request
+                response = client.check_active_jobs(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest, dict]):
+                The request object. Request to check whether active jobs
+                exist for a project.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse:
+                Response for CheckActiveJobsRequest.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a jobs.CheckActiveJobsRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, jobs.CheckActiveJobsRequest):
+            request = jobs.CheckActiveJobsRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.check_active_jobs]
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def snapshot_job(self,
+            request: Optional[Union[jobs.SnapshotJobRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> snapshots.Snapshot:
+        r"""Snapshot the state of a streaming job.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_snapshot_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.SnapshotJobRequest( + ) + + # Make the request + response = client.snapshot_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.SnapshotJobRequest, dict]): + The request object. Request to create a snapshot of a + job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a jobs.SnapshotJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, jobs.SnapshotJobRequest): + request = jobs.SnapshotJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.snapshot_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "JobsV1Beta3Client", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py new file mode 100644 index 0000000..2f8687c --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.dataflow_v1beta3.types import jobs + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., jobs.ListJobsResponse], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[jobs.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[jobs.ListJobsResponse]], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
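+
+        Example (illustrative; assumes an async client named ``client`` and an
+        illustrative project ID)::
+
+            pager = await client.list_jobs(request={"project_id": "my-project"})
+            async for job in pager:
+                print(job.id)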
+ """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[jobs.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AggregatedListJobsPager: + """A pager for iterating through ``aggregated_list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``AggregatedListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., jobs.ListJobsResponse], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[jobs.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class AggregatedListJobsAsyncPager: + """A pager for iterating through ``aggregated_list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``AggregatedListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[jobs.ListJobsResponse]], + request: jobs.ListJobsRequest, + response: jobs.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = jobs.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[jobs.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[jobs.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py new file mode 100644 index 0000000..8dcbf32 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import JobsV1Beta3Transport +from .grpc import JobsV1Beta3GrpcTransport +from .grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport +from .rest import JobsV1Beta3RestTransport +from .rest import JobsV1Beta3RestInterceptor + + +# Compile a registry of transports. 
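+# (Illustrative) A lookup such as _transport_registry["grpc"] then resolves to
+# JobsV1Beta3GrpcTransport.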
+_transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] +_transport_registry['grpc'] = JobsV1Beta3GrpcTransport +_transport_registry['grpc_asyncio'] = JobsV1Beta3GrpcAsyncIOTransport +_transport_registry['rest'] = JobsV1Beta3RestTransport + +__all__ = ( + 'JobsV1Beta3Transport', + 'JobsV1Beta3GrpcTransport', + 'JobsV1Beta3GrpcAsyncIOTransport', + 'JobsV1Beta3RestTransport', + 'JobsV1Beta3RestInterceptor', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py new file mode 100644 index 0000000..b581aa4 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class JobsV1Beta3Transport(abc.ABC): + """Abstract transport class for JobsV1Beta3.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', + ) + + DEFAULT_HOST: str = 'dataflow.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_job: gapic_v1.method.wrap_method( + self.create_job, + default_timeout=None, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_timeout=None, + client_info=client_info, + ), + self.update_job: gapic_v1.method.wrap_method( + self.update_job, + default_timeout=None, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_timeout=None, + client_info=client_info, + ), + self.aggregated_list_jobs: gapic_v1.method.wrap_method( + self.aggregated_list_jobs, + default_timeout=None, + client_info=client_info, + ), + self.check_active_jobs: gapic_v1.method.wrap_method( + self.check_active_jobs, + default_timeout=None, + client_info=client_info, + ), + self.snapshot_job: gapic_v1.method.wrap_method( + self.snapshot_job, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_job(self) -> Callable[ + [jobs.CreateJobRequest], + Union[ + jobs.Job, + Awaitable[jobs.Job] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [jobs.GetJobRequest], + Union[ + jobs.Job, + Awaitable[jobs.Job] + ]]: + raise NotImplementedError() + + @property + def update_job(self) -> Callable[ + [jobs.UpdateJobRequest], + Union[ + jobs.Job, + Awaitable[jobs.Job] + ]]: + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [jobs.ListJobsRequest], + Union[ + jobs.ListJobsResponse, + Awaitable[jobs.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def aggregated_list_jobs(self) -> Callable[ + [jobs.ListJobsRequest], + Union[ + jobs.ListJobsResponse, + Awaitable[jobs.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def check_active_jobs(self) -> Callable[ + [jobs.CheckActiveJobsRequest], + Union[ + jobs.CheckActiveJobsResponse, + Awaitable[jobs.CheckActiveJobsResponse] + ]]: + raise NotImplementedError() + + @property + def snapshot_job(self) -> Callable[ + [jobs.SnapshotJobRequest], + Union[ + snapshots.Snapshot, + Awaitable[snapshots.Snapshot] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'JobsV1Beta3Transport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py new file mode 100644 index 0000000..9949ea8 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO + + +class JobsV1Beta3GrpcTransport(JobsV1Beta3Transport): + """gRPC backend transport for JobsV1Beta3. + + Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_job(self) -> Callable[ + [jobs.CreateJobRequest], + jobs.Job]: + r"""Return a callable for the create job method over gRPC. + + Creates a Cloud Dataflow job. + + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + Returns: + Callable[[~.CreateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/CreateJob', + request_serializer=jobs.CreateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def get_job(self) -> Callable[ + [jobs.GetJobRequest], + jobs.Job]: + r"""Return a callable for the get job method over gRPC. + + Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/GetJob', + request_serializer=jobs.GetJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def update_job(self) -> Callable[ + [jobs.UpdateJobRequest], + jobs.Job]: + r"""Return a callable for the update job method over gRPC. + + Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.UpdateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_job' not in self._stubs:
+ self._stubs['update_job'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob',
+ request_serializer=jobs.UpdateJobRequest.serialize,
+ response_deserializer=jobs.Job.deserialize,
+ )
+ return self._stubs['update_job']
+
+ @property
+ def list_jobs(self) -> Callable[
+ [jobs.ListJobsRequest],
+ jobs.ListJobsResponse]:
+ r"""Return a callable for the list jobs method over gRPC.
+
+ List the jobs of a project.
+
+ To list the jobs of a project in a region, we recommend using
+ ``projects.locations.jobs.list`` with a [regional endpoint]
+ (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+ To list all jobs across all regions, use
+ ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
+ not recommended, as you can only get the list of jobs that are
+ running in ``us-central1``.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ ~.ListJobsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_jobs' not in self._stubs:
+ self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.JobsV1Beta3/ListJobs',
+ request_serializer=jobs.ListJobsRequest.serialize,
+ response_deserializer=jobs.ListJobsResponse.deserialize,
+ )
+ return self._stubs['list_jobs']
+
+ @property
+ def aggregated_list_jobs(self) -> Callable[
+ [jobs.ListJobsRequest],
+ jobs.ListJobsResponse]:
+ r"""Return a callable for the aggregated list jobs method over gRPC.
+
+ List the jobs of a project across all regions.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ ~.ListJobsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'aggregated_list_jobs' not in self._stubs:
+ self._stubs['aggregated_list_jobs'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs',
+ request_serializer=jobs.ListJobsRequest.serialize,
+ response_deserializer=jobs.ListJobsResponse.deserialize,
+ )
+ return self._stubs['aggregated_list_jobs']
+
+ @property
+ def check_active_jobs(self) -> Callable[
+ [jobs.CheckActiveJobsRequest],
+ jobs.CheckActiveJobsResponse]:
+ r"""Return a callable for the check active jobs method over gRPC.
+
+ Check for existence of active jobs in the given
+ project across all regions.
+
+ Returns:
+ Callable[[~.CheckActiveJobsRequest],
+ ~.CheckActiveJobsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
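+ # Note: CheckActiveJobs is reachable over gRPC only; the REST
+ # transport raises instead, because the method carries no
+ # ``google.api.http`` annotation (see transports/rest.py).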
+ if 'check_active_jobs' not in self._stubs: + self._stubs['check_active_jobs'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs', + request_serializer=jobs.CheckActiveJobsRequest.serialize, + response_deserializer=jobs.CheckActiveJobsResponse.deserialize, + ) + return self._stubs['check_active_jobs'] + + @property + def snapshot_job(self) -> Callable[ + [jobs.SnapshotJobRequest], + snapshots.Snapshot]: + r"""Return a callable for the snapshot job method over gRPC. + + Snapshot the state of a streaming job. + + Returns: + Callable[[~.SnapshotJobRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'snapshot_job' not in self._stubs: + self._stubs['snapshot_job'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob', + request_serializer=jobs.SnapshotJobRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs['snapshot_job'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'JobsV1Beta3GrpcTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py new file mode 100644 index 0000000..5ac3c50 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py @@ -0,0 +1,450 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .grpc import JobsV1Beta3GrpcTransport + + +class JobsV1Beta3GrpcAsyncIOTransport(JobsV1Beta3Transport): + """gRPC AsyncIO backend transport for JobsV1Beta3. + + Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
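+
+ A minimal async sketch (assumptions for illustration: Application
+ Default Credentials are available and the code runs inside an event
+ loop, e.g. via ``asyncio.run``):
+
+ .. code-block:: python
+
+ from google.cloud.dataflow_v1beta3 import JobsV1Beta3AsyncClient
+
+ # The async client selects this gRPC AsyncIO transport by default.
+ client = JobsV1Beta3AsyncClient()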
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job(self) -> Callable[ + [jobs.CreateJobRequest], + Awaitable[jobs.Job]]: + r"""Return a callable for the create job method over gRPC. + + Creates a Cloud Dataflow job. + + To create a job, we recommend using + ``projects.locations.jobs.create`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.create`` is not recommended, as your job + will always start in ``us-central1``. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/CreateJob', + request_serializer=jobs.CreateJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def get_job(self) -> Callable[ + [jobs.GetJobRequest], + Awaitable[jobs.Job]]: + r"""Return a callable for the get job method over gRPC. + + Gets the state of the specified Cloud Dataflow job. + + To get the state of a job, we recommend using + ``projects.locations.jobs.get`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.get`` is not recommended, as you can only + get the state of jobs that are running in ``us-central1``. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/GetJob', + request_serializer=jobs.GetJobRequest.serialize, + response_deserializer=jobs.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def update_job(self) -> Callable[ + [jobs.UpdateJobRequest], + Awaitable[jobs.Job]]: + r"""Return a callable for the update job method over gRPC. + + Updates the state of an existing Cloud Dataflow job. + + To update the state of an existing job, we recommend using + ``projects.locations.jobs.update`` with a [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.update`` is not recommended, as you can + only update the state of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.UpdateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
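+ # Same lazy-create-and-cache pattern as the sync transport; the
+ # difference is that the cached callable returns an awaitable Job.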
+ if 'update_job' not in self._stubs:
+ self._stubs['update_job'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob',
+ request_serializer=jobs.UpdateJobRequest.serialize,
+ response_deserializer=jobs.Job.deserialize,
+ )
+ return self._stubs['update_job']
+
+ @property
+ def list_jobs(self) -> Callable[
+ [jobs.ListJobsRequest],
+ Awaitable[jobs.ListJobsResponse]]:
+ r"""Return a callable for the list jobs method over gRPC.
+
+ List the jobs of a project.
+
+ To list the jobs of a project in a region, we recommend using
+ ``projects.locations.jobs.list`` with a [regional endpoint]
+ (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+ To list all jobs across all regions, use
+ ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
+ not recommended, as you can only get the list of jobs that are
+ running in ``us-central1``.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ Awaitable[~.ListJobsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_jobs' not in self._stubs:
+ self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.JobsV1Beta3/ListJobs',
+ request_serializer=jobs.ListJobsRequest.serialize,
+ response_deserializer=jobs.ListJobsResponse.deserialize,
+ )
+ return self._stubs['list_jobs']
+
+ @property
+ def aggregated_list_jobs(self) -> Callable[
+ [jobs.ListJobsRequest],
+ Awaitable[jobs.ListJobsResponse]]:
+ r"""Return a callable for the aggregated list jobs method over gRPC.
+
+ List the jobs of a project across all regions.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ Awaitable[~.ListJobsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'aggregated_list_jobs' not in self._stubs:
+ self._stubs['aggregated_list_jobs'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs',
+ request_serializer=jobs.ListJobsRequest.serialize,
+ response_deserializer=jobs.ListJobsResponse.deserialize,
+ )
+ return self._stubs['aggregated_list_jobs']
+
+ @property
+ def check_active_jobs(self) -> Callable[
+ [jobs.CheckActiveJobsRequest],
+ Awaitable[jobs.CheckActiveJobsResponse]]:
+ r"""Return a callable for the check active jobs method over gRPC.
+
+ Check for existence of active jobs in the given
+ project across all regions.
+
+ Returns:
+ Callable[[~.CheckActiveJobsRequest],
+ Awaitable[~.CheckActiveJobsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'check_active_jobs' not in self._stubs: + self._stubs['check_active_jobs'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs', + request_serializer=jobs.CheckActiveJobsRequest.serialize, + response_deserializer=jobs.CheckActiveJobsResponse.deserialize, + ) + return self._stubs['check_active_jobs'] + + @property + def snapshot_job(self) -> Callable[ + [jobs.SnapshotJobRequest], + Awaitable[snapshots.Snapshot]]: + r"""Return a callable for the snapshot job method over gRPC. + + Snapshot the state of a streaming job. + + Returns: + Callable[[~.SnapshotJobRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'snapshot_job' not in self._stubs: + self._stubs['snapshot_job'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob', + request_serializer=jobs.SnapshotJobRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs['snapshot_job'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'JobsV1Beta3GrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py new file mode 100644 index 0000000..75f3281 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py @@ -0,0 +1,902 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots + +from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class JobsV1Beta3RestInterceptor: + """Interceptor for JobsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the JobsV1Beta3RestTransport. + + .. 
code-block:: python + class MyCustomJobsV1Beta3Interceptor(JobsV1Beta3RestInterceptor): + def pre_aggregated_list_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_check_active_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_active_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_jobs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_jobs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_snapshot_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_snapshot_job(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_job(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_job(self, response): + logging.log(f"Received response: {response}") + return response + + transport = JobsV1Beta3RestTransport(interceptor=MyCustomJobsV1Beta3Interceptor()) + client = JobsV1Beta3Client(transport=transport) + + + """ + def pre_aggregated_list_jobs(self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for aggregated_list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_aggregated_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: + """Post-rpc interceptor for aggregated_list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + def pre_create_job(self, request: jobs.CreateJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.CreateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_create_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for create_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + def pre_get_job(self, request: jobs.GetJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.GetJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. 
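+
+ A sketch of an override (the header name is illustrative only):
+
+ .. code-block:: python
+
+ def pre_get_job(self, request, metadata):
+ # Append custom metadata before the request is sent.
+ metadata = tuple(metadata) + (('x-custom-header', 'value'),)
+ return request, metadata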
+ """ + return request, metadata + + def post_get_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for get_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + def pre_list_jobs(self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_jobs + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: + """Post-rpc interceptor for list_jobs + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + def pre_snapshot_job(self, request: jobs.SnapshotJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.SnapshotJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for snapshot_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_snapshot_job(self, response: snapshots.Snapshot) -> snapshots.Snapshot: + """Post-rpc interceptor for snapshot_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + def pre_update_job(self, request: jobs.UpdateJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.UpdateJobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_job + + Override in a subclass to manipulate the request or metadata + before they are sent to the JobsV1Beta3 server. + """ + return request, metadata + + def post_update_job(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for update_job + + Override in a subclass to manipulate the response + after it is returned by the JobsV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class JobsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: JobsV1Beta3RestInterceptor + + +class JobsV1Beta3RestTransport(JobsV1Beta3Transport): + """REST backend transport for JobsV1Beta3. + + Provides a method to create and modify Google Cloud Dataflow + jobs. A Job is a multi-stage computation graph run by the Cloud + Dataflow service. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[JobsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+ # credentials object
+ maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ api_audience=api_audience
+ )
+ self._session = AuthorizedSession(
+ self._credentials, default_host=self.DEFAULT_HOST)
+ if client_cert_source_for_mtls:
+ self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ self._interceptor = interceptor or JobsV1Beta3RestInterceptor()
+ self._prep_wrapped_messages(client_info)
+
+ class _AggregatedListJobs(JobsV1Beta3RestStub):
+ def __hash__(self):
+ return hash("AggregatedListJobs")
+
+ def __call__(self,
+ request: jobs.ListJobsRequest, *,
+ retry: OptionalRetry=gapic_v1.method.DEFAULT,
+ timeout: Optional[float]=None,
+ metadata: Sequence[Tuple[str, str]]=(),
+ ) -> jobs.ListJobsResponse:
+ r"""Call the aggregated list jobs method over HTTP.
+
+ Args:
+ request (~.jobs.ListJobsRequest):
+ The request object. Request to list Cloud Dataflow jobs.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.jobs.ListJobsResponse:
+ Response to a request to list Cloud
+ Dataflow jobs in a project. This might
+ be a partial response, depending on the
+ page size in the ListJobsRequest.
+ However, if the project does not have
+ any jobs, an instance of
+ ListJobsResponse is not returned and the
+ request's response body is empty {}.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/jobs:aggregated', + }, + ] + request, metadata = self._interceptor.pre_aggregated_list_jobs(request, metadata) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_aggregated_list_jobs(resp) + return resp + + class _CheckActiveJobs(JobsV1Beta3RestStub): + def __hash__(self): + return hash("CheckActiveJobs") + + def __call__(self, + request: jobs.CheckActiveJobsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> jobs.CheckActiveJobsResponse: + raise RuntimeError( + "Cannot define a method without a valid 'google.api.http' annotation.") + + class _CreateJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("CreateJob") + + def __call__(self, + request: jobs.CreateJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> jobs.Job: + r"""Call the create job method over HTTP. + + Args: + request (~.jobs.CreateJobRequest): + The request object. Request to create a Cloud Dataflow + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs', + 'body': 'job', + }, +{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/jobs', + 'body': 'job', + }, + ] + request, metadata = self._interceptor.pre_create_job(request, metadata) + pb_request = jobs.CreateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job(resp) + return resp + + class _GetJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("GetJob") + + def __call__(self, + request: jobs.GetJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> jobs.Job: + r"""Call the get job method over HTTP. + + Args: + request (~.jobs.GetJobRequest): + The request object. Request to get the state of a Cloud + Dataflow job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+
+ """
+
+ http_options: List[Dict[str, str]] = [{
+ 'method': 'get',
+ 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}',
+ },
+{
+ 'method': 'get',
+ 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}',
+ },
+ ]
+ request, metadata = self._interceptor.pre_get_job(request, metadata)
+ pb_request = jobs.GetJobRequest.pb(request)
+ transcoded_request = path_template.transcode(http_options, pb_request)
+
+ uri = transcoded_request['uri']
+ method = transcoded_request['method']
+
+ # Jsonify the query params
+ query_params = json.loads(json_format.MessageToJson(
+ transcoded_request['query_params'],
+ including_default_value_fields=False,
+ use_integers_for_enums=True,
+ ))
+
+ query_params["$alt"] = "json;enum-encoding=int"
+
+ # Send the request
+ headers = dict(metadata)
+ headers['Content-Type'] = 'application/json'
+ response = getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params, strict=True),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ raise core_exceptions.from_http_response(response)
+
+ # Return the response
+ resp = jobs.Job()
+ pb_resp = jobs.Job.pb(resp)
+
+ json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+ resp = self._interceptor.post_get_job(resp)
+ return resp
+
+ class _ListJobs(JobsV1Beta3RestStub):
+ def __hash__(self):
+ return hash("ListJobs")
+
+ def __call__(self,
+ request: jobs.ListJobsRequest, *,
+ retry: OptionalRetry=gapic_v1.method.DEFAULT,
+ timeout: Optional[float]=None,
+ metadata: Sequence[Tuple[str, str]]=(),
+ ) -> jobs.ListJobsResponse:
+ r"""Call the list jobs method over HTTP.
+
+ Args:
+ request (~.jobs.ListJobsRequest):
+ The request object. Request to list Cloud Dataflow jobs.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.jobs.ListJobsResponse:
+ Response to a request to list Cloud
+ Dataflow jobs in a project. This might
+ be a partial response, depending on the
+ page size in the ListJobsRequest.
+ However, if the project does not have
+ any jobs, an instance of
+ ListJobsResponse is not returned and the
+ request's response body is empty {}.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/jobs', + }, + ] + request, metadata = self._interceptor.pre_list_jobs(request, metadata) + pb_request = jobs.ListJobsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.ListJobsResponse() + pb_resp = jobs.ListJobsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_jobs(resp) + return resp + + class _SnapshotJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("SnapshotJob") + + def __call__(self, + request: jobs.SnapshotJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> snapshots.Snapshot: + r"""Call the snapshot job method over HTTP. + + Args: + request (~.jobs.SnapshotJobRequest): + The request object. Request to create a snapshot of a + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.Snapshot: + Represents a snapshot of a job. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}:snapshot', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_snapshot_job(request, metadata) + pb_request = jobs.SnapshotJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.Snapshot() + pb_resp = snapshots.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_snapshot_job(resp) + return resp + + class _UpdateJob(JobsV1Beta3RestStub): + def __hash__(self): + return hash("UpdateJob") + + def __call__(self, + request: jobs.UpdateJobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> jobs.Job: + r"""Call the update job method over HTTP. + + Args: + request (~.jobs.UpdateJobRequest): + The request object. Request to update a Cloud Dataflow + job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.jobs.Job: + Defines a job to be run by the Cloud + Dataflow service. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}', + 'body': 'job', + }, +{ + 'method': 'put', + 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}', + 'body': 'job', + }, + ] + request, metadata = self._interceptor.pre_update_job(request, metadata) + pb_request = jobs.UpdateJobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_job(resp) + return resp + + @property + def aggregated_list_jobs(self) -> Callable[ + [jobs.ListJobsRequest], + jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AggregatedListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def check_active_jobs(self) -> Callable[ + [jobs.CheckActiveJobsRequest], + jobs.CheckActiveJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckActiveJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_job(self) -> Callable[ + [jobs.CreateJobRequest], + jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job(self) -> Callable[ + [jobs.GetJobRequest], + jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_jobs(self) -> Callable[ + [jobs.ListJobsRequest], + jobs.ListJobsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore + + @property + def snapshot_job(self) -> Callable[ + [jobs.SnapshotJobRequest], + snapshots.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SnapshotJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_job(self) -> Callable[ + [jobs.UpdateJobRequest], + jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'JobsV1Beta3RestTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py new file mode 100644 index 0000000..c3ca155 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import MessagesV1Beta3Client +from .async_client import MessagesV1Beta3AsyncClient + +__all__ = ( + 'MessagesV1Beta3Client', + 'MessagesV1Beta3AsyncClient', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py new file mode 100644 index 0000000..906666f --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import messages +from .transports.base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .client import MessagesV1Beta3Client + + +class MessagesV1Beta3AsyncClient: + """The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. + """ + + _client: MessagesV1Beta3Client + + DEFAULT_ENDPOINT = MessagesV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MessagesV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(MessagesV1Beta3Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(MessagesV1Beta3Client.parse_common_billing_account_path) + common_folder_path = staticmethod(MessagesV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod(MessagesV1Beta3Client.parse_common_folder_path) + common_organization_path = staticmethod(MessagesV1Beta3Client.common_organization_path) + parse_common_organization_path = staticmethod(MessagesV1Beta3Client.parse_common_organization_path) + common_project_path = staticmethod(MessagesV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod(MessagesV1Beta3Client.parse_common_project_path) + common_location_path = staticmethod(MessagesV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod(MessagesV1Beta3Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3AsyncClient: The constructed client. + """ + return MessagesV1Beta3Client.from_service_account_info.__func__(MessagesV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3AsyncClient: The constructed client. 
+ """ + return MessagesV1Beta3Client.from_service_account_file.__func__(MessagesV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MessagesV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MessagesV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MessagesV1Beta3Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(MessagesV1Beta3Client).get_transport_class, type(MessagesV1Beta3Client)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MessagesV1Beta3Transport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the messages v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MessagesV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client = MessagesV1Beta3Client(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+
+ )
+
+ async def list_job_messages(self,
+ request: Optional[Union[messages.ListJobMessagesRequest, dict]] = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListJobMessagesAsyncPager:
+ r"""Request the job status.
+
+ To request the status of a job, we recommend using
+ ``projects.locations.jobs.messages.list`` with a [regional
+ endpoint]
+ (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+ Using ``projects.jobs.messages.list`` is not recommended, as you
+ can only request the status of jobs that are running in
+ ``us-central1``.
+
+ .. code-block:: python
+
+ # This snippet has been automatically generated and should be regarded as a
+ # code template only.
+ # It will require modifications to work:
+ # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataflow_v1beta3
+
+ async def sample_list_job_messages():
+ # Create a client
+ client = dataflow_v1beta3.MessagesV1Beta3AsyncClient()
+
+ # Initialize request argument(s)
+ request = dataflow_v1beta3.ListJobMessagesRequest(
+ )
+
+ # Make the request
+ page_result = await client.list_job_messages(request=request)
+
+ # Handle the response
+ async for response in page_result:
+ print(response)
+
+ Args:
+ request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest, dict]]):
+ The request object. Request to list job messages.
+ Up to max_results messages will be returned in the time
+ range specified, starting with the oldest messages first.
+ If no time range is specified, the results will start
+ with the oldest message.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager:
+ Response to a request to list job
+ messages.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ request = messages.ListJobMessagesRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_job_messages,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
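+ # For illustration (added note; values hypothetical): the routing header
+ # built below serializes these fields into a single "x-goog-request-params"
+ # metadata entry, e.g.
+ # ("x-goog-request-params", "project_id=my-project&location=us-central1&job_id=1234")
+ # so the Dataflow frontend can route the call to the correct region.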
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobMessagesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MessagesV1Beta3AsyncClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py new file mode 100644 index 0000000..4d4c75a --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py @@ -0,0 +1,500 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import messages +from .transports.base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import MessagesV1Beta3GrpcTransport +from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .transports.rest import MessagesV1Beta3RestTransport + + +class MessagesV1Beta3ClientMeta(type): + """Metaclass for the MessagesV1Beta3 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
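+
+ A minimal usage sketch (hypothetical; "rest" is one of the labels
+ registered in the transport registry below):
+
+ .. code-block:: python
+
+ transport_cls = MessagesV1Beta3Client.get_transport_class("rest")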
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MessagesV1Beta3Transport]] + _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport + _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = MessagesV1Beta3RestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MessagesV1Beta3Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MessagesV1Beta3Client(metaclass=MessagesV1Beta3ClientMeta): + """The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MessagesV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MessagesV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MessagesV1Beta3Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MessagesV1Beta3Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the messages v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MessagesV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MessagesV1Beta3Transport): + # transport is a MessagesV1Beta3Transport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def list_job_messages(self, + request: Optional[Union[messages.ListJobMessagesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobMessagesPager: + r"""Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.messages.list`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.messages.list`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service
+ # client as shown in:
+ # https://googleapis.dev/python/google-api-core/latest/client_options.html
+ from google.cloud import dataflow_v1beta3
+
+ def sample_list_job_messages():
+ # Create a client
+ client = dataflow_v1beta3.MessagesV1Beta3Client()
+
+ # Initialize request argument(s)
+ request = dataflow_v1beta3.ListJobMessagesRequest(
+ )
+
+ # Make the request
+ page_result = client.list_job_messages(request=request)
+
+ # Handle the response
+ for response in page_result:
+ print(response)
+
+ Args:
+ request (Union[google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest, dict]):
+ The request object. Request to list job messages.
+ Up to max_results messages will be returned in the time
+ range specified, starting with the oldest messages first.
+ If no time range is specified, the results will start
+ with the oldest message.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager:
+ Response to a request to list job
+ messages.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Minor optimization to avoid making a copy if the user passes
+ # in a messages.ListJobMessagesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, messages.ListJobMessagesRequest):
+ request = messages.ListJobMessagesRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_job_messages]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("project_id", request.project_id),
+ ("location", request.location),
+ ("job_id", request.job_id),
+ )),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListJobMessagesPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
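+
+ A hedged usage sketch (assuming default credentials; the project and
+ job IDs are placeholders):
+
+ .. code-block:: python
+
+ with dataflow_v1beta3.MessagesV1Beta3Client() as client:
+ request = {"project_id": "my-project", "job_id": "my-job"}
+ for message in client.list_job_messages(request=request):
+ print(message)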
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MessagesV1Beta3Client", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py new file mode 100644 index 0000000..7ccd2c7 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.dataflow_v1beta3.types import messages + + +class ListJobMessagesPager: + """A pager for iterating through ``list_job_messages`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_messages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobMessages`` requests and continue to iterate + through the ``job_messages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., messages.ListJobMessagesResponse], + request: messages.ListJobMessagesRequest, + response: messages.ListJobMessagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = messages.ListJobMessagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[messages.ListJobMessagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[messages.JobMessage]: + for page in self.pages: + yield from page.job_messages + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobMessagesAsyncPager: + """A pager for iterating through ``list_job_messages`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_messages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobMessages`` requests and continue to iterate + through the ``job_messages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[messages.ListJobMessagesResponse]], + request: messages.ListJobMessagesRequest, + response: messages.ListJobMessagesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = messages.ListJobMessagesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[messages.ListJobMessagesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[messages.JobMessage]: + async def async_generator(): + async for page in self.pages: + for response in page.job_messages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py new file mode 100644 index 0000000..e5a2058 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MessagesV1Beta3Transport +from .grpc import MessagesV1Beta3GrpcTransport +from .grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport +from .rest import MessagesV1Beta3RestTransport +from .rest import MessagesV1Beta3RestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MessagesV1Beta3Transport]] +_transport_registry['grpc'] = MessagesV1Beta3GrpcTransport +_transport_registry['grpc_asyncio'] = MessagesV1Beta3GrpcAsyncIOTransport +_transport_registry['rest'] = MessagesV1Beta3RestTransport + +__all__ = ( + 'MessagesV1Beta3Transport', + 'MessagesV1Beta3GrpcTransport', + 'MessagesV1Beta3GrpcAsyncIOTransport', + 'MessagesV1Beta3RestTransport', + 'MessagesV1Beta3RestInterceptor', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py new file mode 100644 index 0000000..d416b54 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import messages + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MessagesV1Beta3Transport(abc.ABC): + """Abstract transport class for MessagesV1Beta3.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', + ) + + DEFAULT_HOST: str = 'dataflow.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
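+ # Added note: the branches below resolve credentials in order of
+ # precedence. An explicit ``credentials`` object is used as-is, a
+ # ``credentials_file`` is loaded next, and application default
+ # credentials are the fallback; passing both explicit forms raises.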
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file,
+ **scopes_kwargs,
+ quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+ # Don't apply audience if a credentials file was passed by the user.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ':' not in host:
+ host += ':443'
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_job_messages: gapic_v1.method.wrap_method(
+ self.list_job_messages,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ @property
+ def list_job_messages(self) -> Callable[
+ [messages.ListJobMessagesRequest],
+ Union[
+ messages.ListJobMessagesResponse,
+ Awaitable[messages.ListJobMessagesResponse]
+ ]]:
+ raise NotImplementedError()
+
+ @property
+ def kind(self) -> str:
+ raise NotImplementedError()
+
+
+__all__ = (
+ 'MessagesV1Beta3Transport',
+)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py
new file mode 100644
index 0000000..6028ead
--- /dev/null
+++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py
@@ -0,0 +1,273 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import messages
+from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO
+
+
+class MessagesV1Beta3GrpcTransport(MessagesV1Beta3Transport):
+ """gRPC backend transport for MessagesV1Beta3.
+
+ The Dataflow Messages API is used for monitoring the progress
+ of Dataflow jobs.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+ _stubs: Dict[str, Callable]
+
+ def __init__(self, *,
+ host: str = 'dataflow.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[grpc.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def list_job_messages(self) -> Callable[
+ [messages.ListJobMessagesRequest],
+ messages.ListJobMessagesResponse]:
+ r"""Return a callable for the list job messages method over gRPC.
+
+ Request the job status.
+
+ To request the status of a job, we recommend using
+ ``projects.locations.jobs.messages.list`` with a [regional
+ endpoint]
+ (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+ Using ``projects.jobs.messages.list`` is not recommended, as you
+ can only request the status of jobs that are running in
+ ``us-central1``.
+
+ Returns:
+ Callable[[~.ListJobMessagesRequest],
+ ~.ListJobMessagesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_job_messages' not in self._stubs:
+ self._stubs['list_job_messages'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.MessagesV1Beta3/ListJobMessages',
+ request_serializer=messages.ListJobMessagesRequest.serialize,
+ response_deserializer=messages.ListJobMessagesResponse.deserialize,
+ )
+ return self._stubs['list_job_messages']
+
+ def close(self):
+ self.grpc_channel.close()
+
+ @property
+ def kind(self) -> str:
+ return "grpc"
+
+
+__all__ = (
+ 'MessagesV1Beta3GrpcTransport',
+)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py
new file mode 100644
index 0000000..0778c7b
--- /dev/null
+++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py
@@ -0,0 +1,272 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.auth import credentials as ga_credentials   # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc                    # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import messages
+from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO
+from .grpc import MessagesV1Beta3GrpcTransport
+
+
+class MessagesV1Beta3GrpcAsyncIOTransport(MessagesV1Beta3Transport):
+    """gRPC AsyncIO backend transport for MessagesV1Beta3.
+
+    The Dataflow Messages API is used for monitoring the progress
+    of Dataflow jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataflow.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_job_messages(self) -> Callable[ + [messages.ListJobMessagesRequest], + Awaitable[messages.ListJobMessagesResponse]]: + r"""Return a callable for the list job messages method over gRPC. + + Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.messages.list`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.messages.list`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.ListJobMessagesRequest], + Awaitable[~.ListJobMessagesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_job_messages' not in self._stubs: + self._stubs['list_job_messages'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.MessagesV1Beta3/ListJobMessages', + request_serializer=messages.ListJobMessagesRequest.serialize, + response_deserializer=messages.ListJobMessagesResponse.deserialize, + ) + return self._stubs['list_job_messages'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'MessagesV1Beta3GrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py new file mode 100644 index 0000000..29e415e --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py @@ -0,0 +1,292 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import messages + +from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MessagesV1Beta3RestInterceptor: + """Interceptor for MessagesV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MessagesV1Beta3RestTransport. + + .. 
code-block:: python + class MyCustomMessagesV1Beta3Interceptor(MessagesV1Beta3RestInterceptor): + def pre_list_job_messages(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_job_messages(self, response): + logging.log(f"Received response: {response}") + return response + + transport = MessagesV1Beta3RestTransport(interceptor=MyCustomMessagesV1Beta3Interceptor()) + client = MessagesV1Beta3Client(transport=transport) + + + """ + def pre_list_job_messages(self, request: messages.ListJobMessagesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[messages.ListJobMessagesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_job_messages + + Override in a subclass to manipulate the request or metadata + before they are sent to the MessagesV1Beta3 server. + """ + return request, metadata + + def post_list_job_messages(self, response: messages.ListJobMessagesResponse) -> messages.ListJobMessagesResponse: + """Post-rpc interceptor for list_job_messages + + Override in a subclass to manipulate the response + after it is returned by the MessagesV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MessagesV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MessagesV1Beta3RestInterceptor + + +class MessagesV1Beta3RestTransport(MessagesV1Beta3Transport): + """REST backend transport for MessagesV1Beta3. + + The Dataflow Messages API is used for monitoring the progress + of Dataflow jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[MessagesV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or MessagesV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ListJobMessages(MessagesV1Beta3RestStub):
+        def __hash__(self):
+            return hash("ListJobMessages")
+
+        def __call__(self,
+                request: messages.ListJobMessagesRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> messages.ListJobMessagesResponse:
+            r"""Call the list job messages method over HTTP.
+
+            Args:
+                request (~.messages.ListJobMessagesRequest):
+                    The request object. Request to list job messages. Up to max_results messages
+                will be returned in the time range specified starting
+                with the oldest messages first. If no time range is
+                specified the results will start with the oldest
+                message.
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.messages.ListJobMessagesResponse:
+                    Response to a request to list job
+                messages.
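The host/scheme handling above prefixes ``url_scheme`` only when the supplied host carries no scheme of its own. A sketch pointing the REST transport at a plain-HTTP server, assuming default credentials can be resolved and a hypothetical local address:

.. code-block:: python

    from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.rest import (
        MessagesV1Beta3RestTransport,
    )

    # "localhost:8080" has no scheme, so the constructor rewrites it to
    # "http://localhost:8080" when url_scheme="http".
    transport = MessagesV1Beta3RestTransport(
        host="localhost:8080",  # hypothetical test server
        url_scheme="http",
    )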
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/messages', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}/messages', + }, + ] + request, metadata = self._interceptor.pre_list_job_messages(request, metadata) + pb_request = messages.ListJobMessagesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = messages.ListJobMessagesResponse() + pb_resp = messages.ListJobMessagesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_job_messages(resp) + return resp + + @property + def list_job_messages(self) -> Callable[ + [messages.ListJobMessagesRequest], + messages.ListJobMessagesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListJobMessages(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MessagesV1Beta3RestTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py new file mode 100644 index 0000000..14f89b3 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import MetricsV1Beta3Client +from .async_client import MetricsV1Beta3AsyncClient + +__all__ = ( + 'MetricsV1Beta3Client', + 'MetricsV1Beta3AsyncClient', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py new file mode 100644 index 0000000..0ebe3ad --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py @@ -0,0 +1,496 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .client import MetricsV1Beta3Client + + +class MetricsV1Beta3AsyncClient: + """The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. 
+ """ + + _client: MetricsV1Beta3Client + + DEFAULT_ENDPOINT = MetricsV1Beta3Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetricsV1Beta3Client.DEFAULT_MTLS_ENDPOINT + + common_billing_account_path = staticmethod(MetricsV1Beta3Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(MetricsV1Beta3Client.parse_common_billing_account_path) + common_folder_path = staticmethod(MetricsV1Beta3Client.common_folder_path) + parse_common_folder_path = staticmethod(MetricsV1Beta3Client.parse_common_folder_path) + common_organization_path = staticmethod(MetricsV1Beta3Client.common_organization_path) + parse_common_organization_path = staticmethod(MetricsV1Beta3Client.parse_common_organization_path) + common_project_path = staticmethod(MetricsV1Beta3Client.common_project_path) + parse_common_project_path = staticmethod(MetricsV1Beta3Client.parse_common_project_path) + common_location_path = staticmethod(MetricsV1Beta3Client.common_location_path) + parse_common_location_path = staticmethod(MetricsV1Beta3Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsV1Beta3AsyncClient: The constructed client. + """ + return MetricsV1Beta3Client.from_service_account_info.__func__(MetricsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsV1Beta3AsyncClient: The constructed client. + """ + return MetricsV1Beta3Client.from_service_account_file.__func__(MetricsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return MetricsV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MetricsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsV1Beta3Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(MetricsV1Beta3Client).get_transport_class, type(MetricsV1Beta3Client)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MetricsV1Beta3Transport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MetricsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = MetricsV1Beta3Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def get_job_metrics(self, + request: Optional[Union[metrics.GetJobMetricsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.JobMetrics: + r"""Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.getMetrics`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.getMetrics`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_get_job_metrics(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobMetricsRequest( + ) + + # Make the request + response = await client.get_job_metrics(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest, dict]]): + The request object. Request to get job metrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.JobMetrics: + JobMetrics contains a collection of + metrics describing the detailed progress + of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics + in the job. + + This resource captures only the most + recent values of each metric; + time-series data can be queried for them + (under the same metric names) from Cloud + Monitoring. + + """ + # Create or coerce a protobuf request object. + request = metrics.GetJobMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_metrics, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_execution_details(self, + request: Optional[Union[metrics.GetJobExecutionDetailsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetJobExecutionDetailsAsyncPager: + r"""Request detailed information about the execution + status of the job. + EXPERIMENTAL. This API is subject to change or removal + without notice. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
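The generated wrapper above applies no retry by default (``default_timeout=None``), so callers opt in explicitly through the ``retry`` and ``timeout`` parameters. A sketch; retrying on ``ServiceUnavailable`` is an assumption for illustration, not a generated default:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import dataflow_v1beta3

    client = dataflow_v1beta3.MetricsV1Beta3Client()
    response = client.get_job_metrics(
        request=dataflow_v1beta3.GetJobMetricsRequest(),
        retry=retries.Retry(
            predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
            deadline=60.0,  # give up retrying after 60 seconds
        ),
        timeout=60.0,  # deadline for the request
    )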
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_get_job_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_job_execution_details(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest, dict]]): + The request object. Request to get job execution + details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager: + Information about the execution of a + job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = metrics.GetJobExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_execution_details, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.GetJobExecutionDetailsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stage_execution_details(self, + request: Optional[Union[metrics.GetStageExecutionDetailsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetStageExecutionDetailsAsyncPager: + r"""Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_get_stage_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetStageExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_stage_execution_details(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest, dict]]): + The request object. Request to get information about a + particular execution stage of a job. Currently only + tracked for Batch jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager: + Information about the workers and + work items within a stage. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = metrics.GetStageExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_stage_execution_details, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + ("stage_id", request.stage_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.GetStageExecutionDetailsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MetricsV1Beta3AsyncClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py new file mode 100644 index 0000000..e0e210d --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py @@ -0,0 +1,694 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
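The ``__aenter__``/``__aexit__`` pair above makes the async client usable as an async context manager, closing the underlying transport on exit. A minimal sketch:

.. code-block:: python

    import asyncio

    from google.cloud import dataflow_v1beta3

    async def main():
        async with dataflow_v1beta3.MetricsV1Beta3AsyncClient() as client:
            response = await client.get_job_metrics(
                request=dataflow_v1beta3.GetJobMetricsRequest()
            )
            print(response)

    asyncio.run(main())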
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.types import metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import MetricsV1Beta3GrpcTransport +from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .transports.rest import MetricsV1Beta3RestTransport + + +class MetricsV1Beta3ClientMeta(type): + """Metaclass for the MetricsV1Beta3 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] + _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport + _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = MetricsV1Beta3RestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[MetricsV1Beta3Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MetricsV1Beta3Client(metaclass=MetricsV1Beta3ClientMeta): + """The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataflow.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsV1Beta3Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsV1Beta3Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsV1Beta3Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MetricsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetricsV1Beta3Transport): + # transport is a MetricsV1Beta3Transport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_job_metrics(self, + request: Optional[Union[metrics.GetJobMetricsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metrics.JobMetrics: + r"""Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.getMetrics`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.getMetrics`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_get_job_metrics(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobMetricsRequest( + ) + + # Make the request + response = client.get_job_metrics(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest, dict]): + The request object. Request to get job metrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.JobMetrics: + JobMetrics contains a collection of + metrics describing the detailed progress + of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics + in the job. + + This resource captures only the most + recent values of each metric; + time-series data can be queried for them + (under the same metric names) from Cloud + Monitoring. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metrics.GetJobMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metrics.GetJobMetricsRequest): + request = metrics.GetJobMetricsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job_execution_details(self, + request: Optional[Union[metrics.GetJobExecutionDetailsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetJobExecutionDetailsPager: + r"""Request detailed information about the execution + status of the job. + EXPERIMENTAL. This API is subject to change or removal + without notice. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_get_job_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_job_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest, dict]): + The request object. Request to get job execution + details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager: + Information about the execution of a + job. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metrics.GetJobExecutionDetailsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metrics.GetJobExecutionDetailsRequest): + request = metrics.GetJobExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_execution_details] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.GetJobExecutionDetailsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_stage_execution_details(self, + request: Optional[Union[metrics.GetStageExecutionDetailsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.GetStageExecutionDetailsPager: + r"""Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_get_stage_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetStageExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_stage_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest, dict]): + The request object. Request to get information about a + particular execution stage of a job. Currently only + tracked for Batch jobs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager: + Information about the workers and + work items within a stage. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a metrics.GetStageExecutionDetailsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metrics.GetStageExecutionDetailsRequest): + request = metrics.GetStageExecutionDetailsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_stage_execution_details] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + ("stage_id", request.stage_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.GetStageExecutionDetailsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
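+
+ A minimal usage sketch (assumes application default
+ credentials are available in the environment):
+
+ .. code-block:: python
+
+ from google.cloud import dataflow_v1beta3
+
+ # The transport is closed automatically when the block exits.
+ with dataflow_v1beta3.MetricsV1Beta3Client() as client:
+ request = dataflow_v1beta3.GetJobMetricsRequest()
+ response = client.get_job_metrics(request=request)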
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "MetricsV1Beta3Client", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py new file mode 100644 index 0000000..b12c560 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.dataflow_v1beta3.types import metrics + + +class GetJobExecutionDetailsPager: + """A pager for iterating through ``get_job_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` object, and + provides an ``__iter__`` method to iterate through its + ``stages`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``GetJobExecutionDetails`` requests and continue to iterate + through the ``stages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metrics.JobExecutionDetails], + request: metrics.GetJobExecutionDetailsRequest, + response: metrics.JobExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.JobExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metrics.GetJobExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metrics.JobExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metrics.StageSummary]: + for page in self.pages: + yield from page.stages + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class GetJobExecutionDetailsAsyncPager: + """A pager for iterating through ``get_job_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` object, and + provides an ``__aiter__`` method to iterate through its + ``stages`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``GetJobExecutionDetails`` requests and continue to iterate + through the ``stages`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metrics.JobExecutionDetails]], + request: metrics.GetJobExecutionDetailsRequest, + response: metrics.JobExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.JobExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metrics.GetJobExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metrics.JobExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[metrics.StageSummary]: + async def async_generator(): + async for page in self.pages: + for response in page.stages: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class GetStageExecutionDetailsPager: + """A pager for iterating through ``get_stage_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` object, and + provides an ``__iter__`` method to iterate through its + ``workers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``GetStageExecutionDetails`` requests and continue to iterate + through the ``workers`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., metrics.StageExecutionDetails], + request: metrics.GetStageExecutionDetailsRequest, + response: metrics.StageExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.StageExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = metrics.GetStageExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[metrics.StageExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[metrics.WorkerDetails]: + for page in self.pages: + yield from page.workers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class GetStageExecutionDetailsAsyncPager: + """A pager for iterating through ``get_stage_execution_details`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` object, and + provides an ``__aiter__`` method to iterate through its + ``workers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``GetStageExecutionDetails`` requests and continue to iterate + through the ``workers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[metrics.StageExecutionDetails]], + request: metrics.GetStageExecutionDetailsRequest, + response: metrics.StageExecutionDetails, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): + The initial request object. + response (google.cloud.dataflow_v1beta3.types.StageExecutionDetails): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = metrics.GetStageExecutionDetailsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[metrics.StageExecutionDetails]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[metrics.WorkerDetails]: + async def async_generator(): + async for page in self.pages: + for response in page.workers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py new file mode 100644 index 0000000..9e9fd57 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import MetricsV1Beta3Transport +from .grpc import MetricsV1Beta3GrpcTransport +from .grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport +from .rest import MetricsV1Beta3RestTransport +from .rest import MetricsV1Beta3RestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] +_transport_registry['grpc'] = MetricsV1Beta3GrpcTransport +_transport_registry['grpc_asyncio'] = MetricsV1Beta3GrpcAsyncIOTransport +_transport_registry['rest'] = MetricsV1Beta3RestTransport + +__all__ = ( + 'MetricsV1Beta3Transport', + 'MetricsV1Beta3GrpcTransport', + 'MetricsV1Beta3GrpcAsyncIOTransport', + 'MetricsV1Beta3RestTransport', + 'MetricsV1Beta3RestInterceptor', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py new file mode 100644 index 0000000..54c9b1f --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import metrics + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class MetricsV1Beta3Transport(abc.ABC): + """Abstract transport class for MetricsV1Beta3.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', + ) + + DEFAULT_HOST: str = 'dataflow.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
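+ # Resolution order, summarized from the checks below: passing both
+ # ``credentials`` and ``credentials_file`` is an error; otherwise a
+ # provided file is loaded, and application default credentials are
+ # used as the final fallback.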
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file,
+ **scopes_kwargs,
+ quota_project_id=quota_project_id
+ )
+ elif credentials is None:
+ credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+ # Don't apply the audience if the user passed a credentials file.
+ if hasattr(credentials, "with_gdch_audience"):
+ credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
+
+ # If the credentials are service account credentials, then always try to use self signed JWT.
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ':' not in host:
+ host += ':443'
+ self._host = host
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.get_job_metrics: gapic_v1.method.wrap_method(
+ self.get_job_metrics,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_job_execution_details: gapic_v1.method.wrap_method(
+ self.get_job_execution_details,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ self.get_stage_execution_details: gapic_v1.method.wrap_method(
+ self.get_stage_execution_details,
+ default_timeout=None,
+ client_info=client_info,
+ ),
+ }
+
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
+ @property
+ def get_job_metrics(self) -> Callable[
+ [metrics.GetJobMetricsRequest],
+ Union[
+ metrics.JobMetrics,
+ Awaitable[metrics.JobMetrics]
+ ]]:
+ raise NotImplementedError()
+
+ @property
+ def get_job_execution_details(self) -> Callable[
+ [metrics.GetJobExecutionDetailsRequest],
+ Union[
+ metrics.JobExecutionDetails,
+ Awaitable[metrics.JobExecutionDetails]
+ ]]:
+ raise NotImplementedError()
+
+ @property
+ def get_stage_execution_details(self) -> Callable[
+ [metrics.GetStageExecutionDetailsRequest],
+ Union[
+ metrics.StageExecutionDetails,
+ Awaitable[metrics.StageExecutionDetails]
+ ]]:
+ raise NotImplementedError()
+
+ @property
+ def kind(self) -> str:
+ raise NotImplementedError()
+
+
+__all__ = (
+ 'MetricsV1Beta3Transport',
+)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py
new file mode 100644
index 0000000..14561c8
--- /dev/null
+++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py
@@ -0,0 +1,332 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import metrics
+from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO
+
+
+class MetricsV1Beta3GrpcTransport(MetricsV1Beta3Transport):
+ """gRPC backend transport for MetricsV1Beta3.
+
+ The Dataflow Metrics API lets you monitor the progress of
+ Dataflow jobs.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+ _stubs: Dict[str, Callable]
+
+ def __init__(self, *,
+ host: str = 'dataflow.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[grpc.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(cls,
+ host: str = 'dataflow.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def get_job_metrics(self) -> Callable[
+ [metrics.GetJobMetricsRequest],
+ metrics.JobMetrics]:
+ r"""Return a callable for the get job metrics method over gRPC.
+
+ Request the job status.
+
+ To request the status of a job, we recommend using
+ ``projects.locations.jobs.getMetrics`` with a [regional
+ endpoint]
+ (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
+ Using ``projects.jobs.getMetrics`` is not recommended, as you
+ can only request the status of jobs that are running in
+ ``us-central1``.
+
+ Returns:
+ Callable[[~.GetJobMetricsRequest],
+ ~.JobMetrics]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'get_job_metrics' not in self._stubs:
+ self._stubs['get_job_metrics'] = self.grpc_channel.unary_unary(
+ '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobMetrics',
+ request_serializer=metrics.GetJobMetricsRequest.serialize,
+ response_deserializer=metrics.JobMetrics.deserialize,
+ )
+ return self._stubs['get_job_metrics']
+
+ @property
+ def get_job_execution_details(self) -> Callable[
+ [metrics.GetJobExecutionDetailsRequest],
+ metrics.JobExecutionDetails]:
+ r"""Return a callable for the get job execution details method over gRPC.
+
+ Request detailed information about the execution
+ status of the job.
+ EXPERIMENTAL. This API is subject to change or removal
+ without notice.
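+
+ A sketch of invoking the returned stub directly (uncommon; the
+ client normally calls it for you, and ``transport`` is assumed
+ to be a configured instance of this class):
+
+ .. code-block:: python
+
+ details = transport.get_job_execution_details(
+ metrics.GetJobExecutionDetailsRequest()
+ )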
+ + Returns: + Callable[[~.GetJobExecutionDetailsRequest], + ~.JobExecutionDetails]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_execution_details' not in self._stubs: + self._stubs['get_job_execution_details'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobExecutionDetails', + request_serializer=metrics.GetJobExecutionDetailsRequest.serialize, + response_deserializer=metrics.JobExecutionDetails.deserialize, + ) + return self._stubs['get_job_execution_details'] + + @property + def get_stage_execution_details(self) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], + metrics.StageExecutionDetails]: + r"""Return a callable for the get stage execution details method over gRPC. + + Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Returns: + Callable[[~.GetStageExecutionDetailsRequest], + ~.StageExecutionDetails]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stage_execution_details' not in self._stubs: + self._stubs['get_stage_execution_details'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.MetricsV1Beta3/GetStageExecutionDetails', + request_serializer=metrics.GetStageExecutionDetailsRequest.serialize, + response_deserializer=metrics.StageExecutionDetails.deserialize, + ) + return self._stubs['get_stage_execution_details'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'MetricsV1Beta3GrpcTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py new file mode 100644 index 0000000..280f1de --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py @@ -0,0 +1,331 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import metrics
+from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO
+from .grpc import MetricsV1Beta3GrpcTransport
+
+
+class MetricsV1Beta3GrpcAsyncIOTransport(MetricsV1Beta3Transport):
+ """gRPC AsyncIO backend transport for MetricsV1Beta3.
+
+ The Dataflow Metrics API lets you monitor the progress of
+ Dataflow jobs.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(cls,
+ host: str = 'dataflow.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ def __init__(self, *,
+ host: str = 'dataflow.googleapis.com',
+ credentials: Optional[ga_credentials.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: Optional[aio.Channel] = None,
+ api_mtls_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ api_audience: Optional[str] = None,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure a mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
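+ # The deprecated ``client_cert_source`` callback takes precedence
+ # here; otherwise fall back to application default SSL credentials.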
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def get_job_metrics(self) -> Callable[ + [metrics.GetJobMetricsRequest], + Awaitable[metrics.JobMetrics]]: + r"""Return a callable for the get job metrics method over gRPC. + + Request the job status. + + To request the status of a job, we recommend using + ``projects.locations.jobs.getMetrics`` with a [regional + endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). + Using ``projects.jobs.getMetrics`` is not recommended, as you + can only request the status of jobs that are running in + ``us-central1``. + + Returns: + Callable[[~.GetJobMetricsRequest], + Awaitable[~.JobMetrics]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_metrics' not in self._stubs: + self._stubs['get_job_metrics'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobMetrics', + request_serializer=metrics.GetJobMetricsRequest.serialize, + response_deserializer=metrics.JobMetrics.deserialize, + ) + return self._stubs['get_job_metrics'] + + @property + def get_job_execution_details(self) -> Callable[ + [metrics.GetJobExecutionDetailsRequest], + Awaitable[metrics.JobExecutionDetails]]: + r"""Return a callable for the get job execution details method over gRPC. + + Request detailed information about the execution + status of the job. + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Returns: + Callable[[~.GetJobExecutionDetailsRequest], + Awaitable[~.JobExecutionDetails]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_execution_details' not in self._stubs: + self._stubs['get_job_execution_details'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobExecutionDetails', + request_serializer=metrics.GetJobExecutionDetailsRequest.serialize, + response_deserializer=metrics.JobExecutionDetails.deserialize, + ) + return self._stubs['get_job_execution_details'] + + @property + def get_stage_execution_details(self) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], + Awaitable[metrics.StageExecutionDetails]]: + r"""Return a callable for the get stage execution details method over gRPC. + + Request detailed information about the execution + status of a stage of the job. + + EXPERIMENTAL. This API is subject to change or removal + without notice. + + Returns: + Callable[[~.GetStageExecutionDetailsRequest], + Awaitable[~.StageExecutionDetails]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stage_execution_details' not in self._stubs: + self._stubs['get_stage_execution_details'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.MetricsV1Beta3/GetStageExecutionDetails', + request_serializer=metrics.GetStageExecutionDetailsRequest.serialize, + response_deserializer=metrics.StageExecutionDetails.deserialize, + ) + return self._stubs['get_stage_execution_details'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'MetricsV1Beta3GrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py new file mode 100644 index 0000000..70be7e4 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+from google.auth.transport.requests import AuthorizedSession # type: ignore
+import json # type: ignore
+import grpc # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+from google.api_core import rest_helpers
+from google.api_core import rest_streaming
+from google.api_core import path_template
+from google.api_core import gapic_v1
+
+from google.protobuf import json_format
+from requests import __version__ as requests_version
+import dataclasses
+import re
+from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
+import warnings
+
+try:
+ OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
+except AttributeError: # pragma: NO COVER
+ OptionalRetry = Union[retries.Retry, object] # type: ignore
+
+
+from google.cloud.dataflow_v1beta3.types import metrics
+
+from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=requests_version,
+)
+
+
+class MetricsV1Beta3RestInterceptor:
+ """Interceptor for MetricsV1Beta3.
+
+ Interceptors are used to manipulate requests, request metadata, and responses
+ in arbitrary ways.
+ Example use cases include:
+ * Logging
+ * Verifying requests according to service or custom semantics
+ * Stripping extraneous information from responses
+
+ These use cases and more can be enabled by injecting an
+ instance of a custom subclass when constructing the MetricsV1Beta3RestTransport.
+
+ .. code-block:: python
+ class MyCustomMetricsV1Beta3Interceptor(MetricsV1Beta3RestInterceptor):
+ def pre_get_job_execution_details(self, request, metadata):
+ logging.info(f"Received request: {request}")
+ return request, metadata
+
+ def post_get_job_execution_details(self, response):
+ logging.info(f"Received response: {response}")
+ return response
+
+ def pre_get_job_metrics(self, request, metadata):
+ logging.info(f"Received request: {request}")
+ return request, metadata
+
+ def post_get_job_metrics(self, response):
+ logging.info(f"Received response: {response}")
+ return response
+
+ def pre_get_stage_execution_details(self, request, metadata):
+ logging.info(f"Received request: {request}")
+ return request, metadata
+
+ def post_get_stage_execution_details(self, response):
+ logging.info(f"Received response: {response}")
+ return response
+
+ transport = MetricsV1Beta3RestTransport(interceptor=MyCustomMetricsV1Beta3Interceptor())
+ client = MetricsV1Beta3Client(transport=transport)
+
+
+ """
+ def pre_get_job_execution_details(self, request: metrics.GetJobExecutionDetailsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[metrics.GetJobExecutionDetailsRequest, Sequence[Tuple[str, str]]]:
+ """Pre-rpc interceptor for get_job_execution_details
+
+ Override in a subclass to manipulate the request or metadata
+ before they are sent to the MetricsV1Beta3 server.
+ """
+ return request, metadata
+
+ def post_get_job_execution_details(self, response: metrics.JobExecutionDetails) -> metrics.JobExecutionDetails:
+ """Post-rpc interceptor for get_job_execution_details
+
+ Override in a subclass to manipulate the response
+ after it is returned by the MetricsV1Beta3 server but before
+ it is returned to user code.
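+
+ A sketch of a possible override (assumes logging is configured
+ elsewhere; ``stages`` is the repeated field paged by the client):
+
+ .. code-block:: python
+
+ def post_get_job_execution_details(self, response):
+ logging.info("stages in page: %d", len(response.stages))
+ return response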
+ """ + return response + def pre_get_job_metrics(self, request: metrics.GetJobMetricsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[metrics.GetJobMetricsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_job_metrics + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_job_metrics(self, response: metrics.JobMetrics) -> metrics.JobMetrics: + """Post-rpc interceptor for get_job_metrics + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. + """ + return response + def pre_get_stage_execution_details(self, request: metrics.GetStageExecutionDetailsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[metrics.GetStageExecutionDetailsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_stage_execution_details + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsV1Beta3 server. + """ + return request, metadata + + def post_get_stage_execution_details(self, response: metrics.StageExecutionDetails) -> metrics.StageExecutionDetails: + """Post-rpc interceptor for get_stage_execution_details + + Override in a subclass to manipulate the response + after it is returned by the MetricsV1Beta3 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MetricsV1Beta3RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetricsV1Beta3RestInterceptor + + +class MetricsV1Beta3RestTransport(MetricsV1Beta3Transport): + """REST backend transport for MetricsV1Beta3. + + The Dataflow Metrics API lets you monitor the progress of + Dataflow jobs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[MetricsV1Beta3RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
+                quota_project_id (Optional[str]): An optional project to use for billing
+                    and quota.
+                client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                    The client info used to send a user-agent string along with
+                    API requests. If ``None``, then default info will be used.
+                    Generally, you only need to set this if you are developing
+                    your own client library.
+                always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                    be used for service account credentials.
+                url_scheme: the protocol scheme for the API endpoint. Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or MetricsV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _GetJobExecutionDetails(MetricsV1Beta3RestStub):
+        def __hash__(self):
+            return hash("GetJobExecutionDetails")
+
+        def __call__(self,
+                request: metrics.GetJobExecutionDetailsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> metrics.JobExecutionDetails:
+            r"""Call the get job execution details method over HTTP.
+
+            Args:
+                request (~.metrics.GetJobExecutionDetailsRequest):
+                    The request object. Request to get job execution details.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.metrics.JobExecutionDetails:
+                    Information about the execution of a
+                    job.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/executionDetails', + }, + ] + request, metadata = self._interceptor.pre_get_job_execution_details(request, metadata) + pb_request = metrics.GetJobExecutionDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.JobExecutionDetails() + pb_resp = metrics.JobExecutionDetails.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_execution_details(resp) + return resp + + class _GetJobMetrics(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetJobMetrics") + + def __call__(self, + request: metrics.GetJobMetricsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> metrics.JobMetrics: + r"""Call the get job metrics method over HTTP. + + Args: + request (~.metrics.GetJobMetricsRequest): + The request object. Request to get job metrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.JobMetrics: + JobMetrics contains a collection of + metrics describing the detailed progress + of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics + in the job. + + This resource captures only the most + recent values of each metric; + time-series data can be queried for them + (under the same metric names) from Cloud + Monitoring. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/metrics', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}/metrics', + }, + ] + request, metadata = self._interceptor.pre_get_job_metrics(request, metadata) + pb_request = metrics.GetJobMetricsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.JobMetrics() + pb_resp = metrics.JobMetrics.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_job_metrics(resp) + return resp + + class _GetStageExecutionDetails(MetricsV1Beta3RestStub): + def __hash__(self): + return hash("GetStageExecutionDetails") + + def __call__(self, + request: metrics.GetStageExecutionDetailsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> metrics.StageExecutionDetails: + r"""Call the get stage execution + details method over HTTP. + + Args: + request (~.metrics.GetStageExecutionDetailsRequest): + The request object. Request to get information about a + particular execution stage of a job. + Currently only tracked for Batch jobs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.metrics.StageExecutionDetails: + Information about the workers and + work items within a stage. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/stages/{stage_id}/executionDetails', + }, + ] + request, metadata = self._interceptor.pre_get_stage_execution_details(request, metadata) + pb_request = metrics.GetStageExecutionDetailsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = metrics.StageExecutionDetails() + pb_resp = metrics.StageExecutionDetails.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_stage_execution_details(resp) + return resp + + @property + def get_job_execution_details(self) -> Callable[ + [metrics.GetJobExecutionDetailsRequest], + metrics.JobExecutionDetails]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobExecutionDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_job_metrics(self) -> Callable[ + [metrics.GetJobMetricsRequest], + metrics.JobMetrics]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetJobMetrics(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_stage_execution_details(self) -> Callable[ + [metrics.GetStageExecutionDetailsRequest], + metrics.StageExecutionDetails]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetStageExecutionDetails(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MetricsV1Beta3RestTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py new file mode 100644 index 0000000..85c4cd1 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import SnapshotsV1Beta3Client +from .async_client import SnapshotsV1Beta3AsyncClient + +__all__ = ( + 'SnapshotsV1Beta3Client', + 'SnapshotsV1Beta3AsyncClient', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py new file mode 100644 index 0000000..1c6acc5 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .client import SnapshotsV1Beta3Client + + +class SnapshotsV1Beta3AsyncClient: + """Provides methods to manage snapshots of Google Cloud Dataflow + jobs. 
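+
+    A minimal usage sketch (assumes Application Default Credentials are
+    available in the environment):
+
+    .. code-block:: python
+
+        from google.cloud import dataflow_v1beta3
+
+        async def main():
+            # The async client is also an async context manager; exiting
+            # the block closes the underlying transport.
+            async with dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() as client:
+                response = await client.list_snapshots(
+                    request=dataflow_v1beta3.ListSnapshotsRequest()
+                )
+                print(response)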
+    """
+
+    _client: SnapshotsV1Beta3Client
+
+    DEFAULT_ENDPOINT = SnapshotsV1Beta3Client.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = SnapshotsV1Beta3Client.DEFAULT_MTLS_ENDPOINT
+
+    common_billing_account_path = staticmethod(SnapshotsV1Beta3Client.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(SnapshotsV1Beta3Client.parse_common_billing_account_path)
+    common_folder_path = staticmethod(SnapshotsV1Beta3Client.common_folder_path)
+    parse_common_folder_path = staticmethod(SnapshotsV1Beta3Client.parse_common_folder_path)
+    common_organization_path = staticmethod(SnapshotsV1Beta3Client.common_organization_path)
+    parse_common_organization_path = staticmethod(SnapshotsV1Beta3Client.parse_common_organization_path)
+    common_project_path = staticmethod(SnapshotsV1Beta3Client.common_project_path)
+    parse_common_project_path = staticmethod(SnapshotsV1Beta3Client.parse_common_project_path)
+    common_location_path = staticmethod(SnapshotsV1Beta3Client.common_location_path)
+    parse_common_location_path = staticmethod(SnapshotsV1Beta3Client.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SnapshotsV1Beta3AsyncClient: The constructed client.
+        """
+        return SnapshotsV1Beta3Client.from_service_account_info.__func__(SnapshotsV1Beta3AsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SnapshotsV1Beta3AsyncClient: The constructed client.
+        """
+        return SnapshotsV1Beta3Client.from_service_account_file.__func__(SnapshotsV1Beta3AsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SnapshotsV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SnapshotsV1Beta3Transport: + """Returns the transport used by the client instance. + + Returns: + SnapshotsV1Beta3Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(SnapshotsV1Beta3Client).get_transport_class, type(SnapshotsV1Beta3Client)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SnapshotsV1Beta3Transport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the snapshots v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SnapshotsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SnapshotsV1Beta3Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def get_snapshot(self, + request: Optional[Union[snapshots.GetSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Gets information about a snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_get_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetSnapshotRequest( + ) + + # Make the request + response = await client.get_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetSnapshotRequest, dict]]): + The request object. Request to get information about a + snapshot + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object. + request = snapshots.GetSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("snapshot_id", request.snapshot_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_snapshot(self, + request: Optional[Union[snapshots.DeleteSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.DeleteSnapshotResponse: + r"""Deletes a snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_delete_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.DeleteSnapshotRequest( + ) + + # Make the request + response = await client.delete_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest, dict]]): + The request object. Request to delete a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse: + Response from deleting a snapshot. 
+ """ + # Create or coerce a protobuf request object. + request = snapshots.DeleteSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_snapshot, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("snapshot_id", request.snapshot_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_snapshots(self, + request: Optional[Union[snapshots.ListSnapshotsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.ListSnapshotsResponse: + r"""Lists snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_list_snapshots(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListSnapshotsRequest( + ) + + # Make the request + response = await client.list_snapshots(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest, dict]]): + The request object. Request to list snapshots. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse: + List of snapshots. + """ + # Create or coerce a protobuf request object. + request = snapshots.ListSnapshotsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_snapshots, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SnapshotsV1Beta3AsyncClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py new file mode 100644 index 0000000..8e5f2e0 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py @@ -0,0 +1,635 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import SnapshotsV1Beta3GrpcTransport +from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .transports.rest import SnapshotsV1Beta3RestTransport + + +class SnapshotsV1Beta3ClientMeta(type): + """Metaclass for the SnapshotsV1Beta3 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] + _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport + _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport + _transport_registry["rest"] = SnapshotsV1Beta3RestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[SnapshotsV1Beta3Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SnapshotsV1Beta3Client(metaclass=SnapshotsV1Beta3ClientMeta):
+    """Provides methods to manage snapshots of Google Cloud Dataflow
+    jobs.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "dataflow.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SnapshotsV1Beta3Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SnapshotsV1Beta3Client: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SnapshotsV1Beta3Transport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SnapshotsV1Beta3Transport: The transport used by the client
+                instance.
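+
+        A quick sketch of inspecting the active transport; ``kind`` is
+        "rest" for the REST transport defined in this package, and the gRPC
+        transports report their own kinds ("grpc"/"grpc_asyncio"):
+
+        .. code-block:: python
+
+            client = SnapshotsV1Beta3Client(transport="rest")
+            assert client.transport.kind == "rest"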
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, SnapshotsV1Beta3Transport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the snapshots v1 beta3 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SnapshotsV1Beta3Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SnapshotsV1Beta3Transport): + # transport is a SnapshotsV1Beta3Transport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def get_snapshot(self, + request: Optional[Union[snapshots.GetSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.Snapshot: + r"""Gets information about a snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_get_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetSnapshotRequest( + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.GetSnapshotRequest, dict]): + The request object. Request to get information about a + snapshot + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataflow_v1beta3.types.Snapshot: + Represents a snapshot of a job. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a snapshots.GetSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, snapshots.GetSnapshotRequest): + request = snapshots.GetSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("snapshot_id", request.snapshot_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_snapshot(self, + request: Optional[Union[snapshots.DeleteSnapshotRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.DeleteSnapshotResponse: + r"""Deletes a snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_delete_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.DeleteSnapshotRequest( + ) + + # Make the request + response = client.delete_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest, dict]): + The request object. Request to delete a snapshot. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse: + Response from deleting a snapshot. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a snapshots.DeleteSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, snapshots.DeleteSnapshotRequest): + request = snapshots.DeleteSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. 
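+        # (The routing parameters below are serialized into the
+        # ``x-goog-request-params`` header so the service can route the
+        # call to the correct project and regional endpoint.)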
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("snapshot_id", request.snapshot_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_snapshots(self, + request: Optional[Union[snapshots.ListSnapshotsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> snapshots.ListSnapshotsResponse: + r"""Lists snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_list_snapshots(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListSnapshotsRequest( + ) + + # Make the request + response = client.list_snapshots(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest, dict]): + The request object. Request to list snapshots. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse: + List of snapshots. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a snapshots.ListSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, snapshots.ListSnapshotsRequest): + request = snapshots.ListSnapshotsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + ("job_id", request.job_id), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
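+
+        A minimal sketch of the intended usage, with one client owning its
+        own transport:
+
+        .. code-block:: python
+
+            from google.cloud import dataflow_v1beta3
+
+            with dataflow_v1beta3.SnapshotsV1Beta3Client() as client:
+                client.list_snapshots(request=dataflow_v1beta3.ListSnapshotsRequest())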
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "SnapshotsV1Beta3Client", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py new file mode 100644 index 0000000..87e928c --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SnapshotsV1Beta3Transport +from .grpc import SnapshotsV1Beta3GrpcTransport +from .grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport +from .rest import SnapshotsV1Beta3RestTransport +from .rest import SnapshotsV1Beta3RestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] +_transport_registry['grpc'] = SnapshotsV1Beta3GrpcTransport +_transport_registry['grpc_asyncio'] = SnapshotsV1Beta3GrpcAsyncIOTransport +_transport_registry['rest'] = SnapshotsV1Beta3RestTransport + +__all__ = ( + 'SnapshotsV1Beta3Transport', + 'SnapshotsV1Beta3GrpcTransport', + 'SnapshotsV1Beta3GrpcAsyncIOTransport', + 'SnapshotsV1Beta3RestTransport', + 'SnapshotsV1Beta3RestInterceptor', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py new file mode 100644 index 0000000..c8ebbbc --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class SnapshotsV1Beta3Transport(abc.ABC): + """Abstract transport class for SnapshotsV1Beta3.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', + ) + + DEFAULT_HOST: str = 'dataflow.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
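+        # (Self-signed JWTs avoid the token-exchange round trip to the OAuth
+        # server, which is why they are preferred here when the credentials
+        # support them.)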
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.get_snapshot: gapic_v1.method.wrap_method( + self.get_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method.wrap_method( + self.delete_snapshot, + default_timeout=None, + client_info=client_info, + ), + self.list_snapshots: gapic_v1.method.wrap_method( + self.list_snapshots, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get_snapshot(self) -> Callable[ + [snapshots.GetSnapshotRequest], + Union[ + snapshots.Snapshot, + Awaitable[snapshots.Snapshot] + ]]: + raise NotImplementedError() + + @property + def delete_snapshot(self) -> Callable[ + [snapshots.DeleteSnapshotRequest], + Union[ + snapshots.DeleteSnapshotResponse, + Awaitable[snapshots.DeleteSnapshotResponse] + ]]: + raise NotImplementedError() + + @property + def list_snapshots(self) -> Callable[ + [snapshots.ListSnapshotsRequest], + Union[ + snapshots.ListSnapshotsResponse, + Awaitable[snapshots.ListSnapshotsResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'SnapshotsV1Beta3Transport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py new file mode 100644 index 0000000..8499964 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dataflow_v1beta3.types import snapshots +from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO + + +class SnapshotsV1Beta3GrpcTransport(SnapshotsV1Beta3Transport): + """gRPC backend transport for SnapshotsV1Beta3. 
+
+    Provides methods to manage snapshots of Google Cloud Dataflow
+    jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataflow.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[grpc.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+              creation failed for any reason.
+          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+              and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataflow.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+
+        Returns:
+            grpc.Channel: A gRPC channel object.
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def get_snapshot(self) -> Callable[ + [snapshots.GetSnapshotRequest], + snapshots.Snapshot]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets information about a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_snapshot' not in self._stubs: + self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.SnapshotsV1Beta3/GetSnapshot', + request_serializer=snapshots.GetSnapshotRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs['get_snapshot'] + + @property + def delete_snapshot(self) -> Callable[ + [snapshots.DeleteSnapshotRequest], + snapshots.DeleteSnapshotResponse]: + r"""Return a callable for the delete snapshot method over gRPC. + + Deletes a snapshot. + + Returns: + Callable[[~.DeleteSnapshotRequest], + ~.DeleteSnapshotResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_snapshot' not in self._stubs: + self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.SnapshotsV1Beta3/DeleteSnapshot', + request_serializer=snapshots.DeleteSnapshotRequest.serialize, + response_deserializer=snapshots.DeleteSnapshotResponse.deserialize, + ) + return self._stubs['delete_snapshot'] + + @property + def list_snapshots(self) -> Callable[ + [snapshots.ListSnapshotsRequest], + snapshots.ListSnapshotsResponse]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists snapshots. + + Returns: + Callable[[~.ListSnapshotsRequest], + ~.ListSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
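+        # Editorial note (illustrative, not generator output): each stub is
+        # created lazily on first property access and cached in
+        # ``self._stubs``, so repeated accesses reuse the same channel-bound
+        # callable.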
+        if 'list_snapshots' not in self._stubs:
+            self._stubs['list_snapshots'] = self.grpc_channel.unary_unary(
+                '/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots',
+                request_serializer=snapshots.ListSnapshotsRequest.serialize,
+                response_deserializer=snapshots.ListSnapshotsResponse.deserialize,
+            )
+        return self._stubs['list_snapshots']
+
+    def close(self):
+        self.grpc_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc"
+
+
+__all__ = (
+    'SnapshotsV1Beta3GrpcTransport',
+)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py
new file mode 100644
index 0000000..ea290a2
--- /dev/null
+++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py
@@ -0,0 +1,316 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import snapshots
+from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO
+from .grpc import SnapshotsV1Beta3GrpcTransport
+
+
+class SnapshotsV1Beta3GrpcAsyncIOTransport(SnapshotsV1Beta3Transport):
+    """gRPC AsyncIO backend transport for SnapshotsV1Beta3.
+
+    Provides methods to manage snapshots of Google Cloud Dataflow
+    jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dataflow.googleapis.com',
+                       credentials: Optional[ga_credentials.Credentials] = None,
+                       credentials_file: Optional[str] = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataflow.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[aio.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                # use the credentials which are saved
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Return the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def get_snapshot(self) -> Callable[
+            [snapshots.GetSnapshotRequest],
+            Awaitable[snapshots.Snapshot]]:
+        r"""Return a callable for the get snapshot method over gRPC.
+
+        Gets information about a snapshot.
+
+        Returns:
+            Callable[[~.GetSnapshotRequest],
+                    Awaitable[~.Snapshot]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
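+        # Editorial note (illustrative, not generator output): unlike the
+        # sync transport, this multicallable returns an awaitable, so the
+        # caller must ``await`` the RPC result.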
+ if 'get_snapshot' not in self._stubs: + self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.SnapshotsV1Beta3/GetSnapshot', + request_serializer=snapshots.GetSnapshotRequest.serialize, + response_deserializer=snapshots.Snapshot.deserialize, + ) + return self._stubs['get_snapshot'] + + @property + def delete_snapshot(self) -> Callable[ + [snapshots.DeleteSnapshotRequest], + Awaitable[snapshots.DeleteSnapshotResponse]]: + r"""Return a callable for the delete snapshot method over gRPC. + + Deletes a snapshot. + + Returns: + Callable[[~.DeleteSnapshotRequest], + Awaitable[~.DeleteSnapshotResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_snapshot' not in self._stubs: + self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.SnapshotsV1Beta3/DeleteSnapshot', + request_serializer=snapshots.DeleteSnapshotRequest.serialize, + response_deserializer=snapshots.DeleteSnapshotResponse.deserialize, + ) + return self._stubs['delete_snapshot'] + + @property + def list_snapshots(self) -> Callable[ + [snapshots.ListSnapshotsRequest], + Awaitable[snapshots.ListSnapshotsResponse]]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists snapshots. + + Returns: + Callable[[~.ListSnapshotsRequest], + Awaitable[~.ListSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_snapshots' not in self._stubs: + self._stubs['list_snapshots'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots', + request_serializer=snapshots.ListSnapshotsRequest.serialize, + response_deserializer=snapshots.ListSnapshotsResponse.deserialize, + ) + return self._stubs['list_snapshots'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'SnapshotsV1Beta3GrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py new file mode 100644 index 0000000..bb10151 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py @@ -0,0 +1,503 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import snapshots + +from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SnapshotsV1Beta3RestInterceptor: + """Interceptor for SnapshotsV1Beta3. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SnapshotsV1Beta3RestTransport. + + .. code-block:: python + class MyCustomSnapshotsV1Beta3Interceptor(SnapshotsV1Beta3RestInterceptor): + def pre_delete_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_snapshots(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_snapshots(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SnapshotsV1Beta3RestTransport(interceptor=MyCustomSnapshotsV1Beta3Interceptor()) + client = SnapshotsV1Beta3Client(transport=transport) + + + """ + def pre_delete_snapshot(self, request: snapshots.DeleteSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[snapshots.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the SnapshotsV1Beta3 server. + """ + return request, metadata + + def post_delete_snapshot(self, response: snapshots.DeleteSnapshotResponse) -> snapshots.DeleteSnapshotResponse: + """Post-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the response + after it is returned by the SnapshotsV1Beta3 server but before + it is returned to user code. 
+        """
+        return response
+
+    def pre_get_snapshot(self, request: snapshots.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[snapshots.GetSnapshotRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for get_snapshot
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SnapshotsV1Beta3 server.
+        """
+        return request, metadata
+
+    def post_get_snapshot(self, response: snapshots.Snapshot) -> snapshots.Snapshot:
+        """Post-rpc interceptor for get_snapshot
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SnapshotsV1Beta3 server but before
+        it is returned to user code.
+        """
+        return response
+
+    def pre_list_snapshots(self, request: snapshots.ListSnapshotsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[snapshots.ListSnapshotsRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for list_snapshots
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SnapshotsV1Beta3 server.
+        """
+        return request, metadata
+
+    def post_list_snapshots(self, response: snapshots.ListSnapshotsResponse) -> snapshots.ListSnapshotsResponse:
+        """Post-rpc interceptor for list_snapshots
+
+        Override in a subclass to manipulate the response
+        after it is returned by the SnapshotsV1Beta3 server but before
+        it is returned to user code.
+        """
+        return response
+
+
+@dataclasses.dataclass
+class SnapshotsV1Beta3RestStub:
+    _session: AuthorizedSession
+    _host: str
+    _interceptor: SnapshotsV1Beta3RestInterceptor
+
+
+class SnapshotsV1Beta3RestTransport(SnapshotsV1Beta3Transport):
+    """REST backend transport for SnapshotsV1Beta3.
+
+    Provides methods to manage snapshots of Google Cloud Dataflow
+    jobs.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+    """
+
+    def __init__(self, *,
+            host: str = 'dataflow.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            client_cert_source_for_mtls: Optional[Callable[[
+                ], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            url_scheme: str = 'https',
+            interceptor: Optional[SnapshotsV1Beta3RestInterceptor] = None,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with ``credentials``.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+                certificate to configure the mutual TLS HTTP channel.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SnapshotsV1Beta3RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _DeleteSnapshot(SnapshotsV1Beta3RestStub):
+        def __hash__(self):
+            return hash("DeleteSnapshot")
+
+        def __call__(self,
+                request: snapshots.DeleteSnapshotRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> snapshots.DeleteSnapshotResponse:
+            r"""Call the delete snapshot method over HTTP.
+
+            Args:
+                request (~.snapshots.DeleteSnapshotRequest):
+                    The request object. Request to delete a snapshot.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.snapshots.DeleteSnapshotResponse:
+                    Response from deleting a snapshot.
+            """
+
+            http_options: List[Dict[str, str]] = [{
+                'method': 'delete',
+                'uri': '/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}',
+            },
+{
+                'method': 'delete',
+                'uri': '/v1b3/projects/{project_id}/snapshots',
+            },
+            ]
+            request, metadata = self._interceptor.pre_delete_snapshot(request, metadata)
+            pb_request = snapshots.DeleteSnapshotRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            uri = transcoded_request['uri']
+            method = transcoded_request['method']
+
+            # Jsonify the query params
+            query_params = json.loads(json_format.MessageToJson(
+                transcoded_request['query_params'],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            ))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers['Content-Type'] = 'application/json'
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
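+            # Editorial note (illustrative, not generator output):
+            # ``exceptions.from_http_response`` maps the HTTP status code to
+            # the matching GoogleAPICallError subclass (for example 404 ->
+            # NotFound, 403 -> PermissionDenied).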
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.DeleteSnapshotResponse() + pb_resp = snapshots.DeleteSnapshotResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_snapshot(resp) + return resp + + class _GetSnapshot(SnapshotsV1Beta3RestStub): + def __hash__(self): + return hash("GetSnapshot") + + def __call__(self, + request: snapshots.GetSnapshotRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> snapshots.Snapshot: + r"""Call the get snapshot method over HTTP. + + Args: + request (~.snapshots.GetSnapshotRequest): + The request object. Request to get information about a + snapshot + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.Snapshot: + Represents a snapshot of a job. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/snapshots/{snapshot_id}', + }, + ] + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) + pb_request = snapshots.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.Snapshot() + pb_resp = snapshots.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_snapshot(resp) + return resp + + class _ListSnapshots(SnapshotsV1Beta3RestStub): + def __hash__(self): + return hash("ListSnapshots") + + def __call__(self, + request: snapshots.ListSnapshotsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> snapshots.ListSnapshotsResponse: + r"""Call the list snapshots method over HTTP. + + Args: + request (~.snapshots.ListSnapshotsRequest): + The request object. Request to list snapshots. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.snapshots.ListSnapshotsResponse: + List of snapshots. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/snapshots', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/snapshots', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/snapshots', + }, + ] + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) + pb_request = snapshots.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = snapshots.ListSnapshotsResponse() + pb_resp = snapshots.ListSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_snapshots(resp) + return resp + + @property + def delete_snapshot(self) -> Callable[ + [snapshots.DeleteSnapshotRequest], + snapshots.DeleteSnapshotResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_snapshot(self) -> Callable[ + [snapshots.GetSnapshotRequest], + snapshots.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_snapshots(self) -> Callable[ + [snapshots.ListSnapshotsRequest], + snapshots.ListSnapshotsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'SnapshotsV1Beta3RestTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py new file mode 100644 index 0000000..3458541 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TemplatesServiceClient +from .async_client import TemplatesServiceAsyncClient + +__all__ = ( + 'TemplatesServiceClient', + 'TemplatesServiceAsyncClient', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py new file mode 100644 index 0000000..892fc1e --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .client import TemplatesServiceClient + + +class TemplatesServiceAsyncClient: + """Provides a method to create Cloud Dataflow jobs from + templates. 
+    """
+
+    _client: TemplatesServiceClient
+
+    DEFAULT_ENDPOINT = TemplatesServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = TemplatesServiceClient.DEFAULT_MTLS_ENDPOINT
+
+    common_billing_account_path = staticmethod(TemplatesServiceClient.common_billing_account_path)
+    parse_common_billing_account_path = staticmethod(TemplatesServiceClient.parse_common_billing_account_path)
+    common_folder_path = staticmethod(TemplatesServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(TemplatesServiceClient.parse_common_folder_path)
+    common_organization_path = staticmethod(TemplatesServiceClient.common_organization_path)
+    parse_common_organization_path = staticmethod(TemplatesServiceClient.parse_common_organization_path)
+    common_project_path = staticmethod(TemplatesServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(TemplatesServiceClient.parse_common_project_path)
+    common_location_path = staticmethod(TemplatesServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(TemplatesServiceClient.parse_common_location_path)
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TemplatesServiceAsyncClient: The constructed client.
+        """
+        return TemplatesServiceClient.from_service_account_info.__func__(TemplatesServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TemplatesServiceAsyncClient: The constructed client.
+        """
+        return TemplatesServiceClient.from_service_account_file.__func__(TemplatesServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return TemplatesServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> TemplatesServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TemplatesServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    get_transport_class = functools.partial(type(TemplatesServiceClient).get_transport_class, type(TemplatesServiceClient))
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Union[str, TemplatesServiceTransport] = "grpc_asyncio",
+            client_options: Optional[ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the templates service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.TemplatesServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client = TemplatesServiceClient(
+            credentials=credentials,
+            transport=transport,
+            client_options=client_options,
+            client_info=client_info,
+        )
+
+    async def create_job_from_template(self,
+            request: Optional[Union[templates.CreateJobFromTemplateRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> jobs.Job:
+        r"""Creates a Cloud Dataflow job from a template.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_create_job_from_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobFromTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = await client.create_job_from_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest, dict]]): + The request object. A request to create a Cloud Dataflow + job from a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + request = templates.CreateJobFromTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_from_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def launch_template(self, + request: Optional[Union[templates.LaunchTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchTemplateResponse: + r"""Launch a template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_launch_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = await client.launch_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest, dict]]): + The request object. A request to launch a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse: + Response to the request to launch a + template. + + """ + # Create or coerce a protobuf request object. + request = templates.LaunchTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.launch_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_template(self, + request: Optional[Union[templates.GetTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.GetTemplateResponse: + r"""Get the template associated with a template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + async def sample_get_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = await client.get_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetTemplateRequest, dict]]): + The request object. A request to retrieve a Cloud + Dataflow job template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.GetTemplateResponse: + The response to a GetTemplate + request. + + """ + # Create or coerce a protobuf request object. + request = templates.GetTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_template, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TemplatesServiceAsyncClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py new file mode 100644 index 0000000..5254aca --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py @@ -0,0 +1,644 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.dataflow_v1beta3 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import TemplatesServiceGrpcTransport +from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .transports.rest import TemplatesServiceRestTransport + + +class TemplatesServiceClientMeta(type): + """Metaclass for the TemplatesService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] + _transport_registry["grpc"] = TemplatesServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport + _transport_registry["rest"] = TemplatesServiceRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TemplatesServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
+                If none is provided, then the first transport in the
+                registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class TemplatesServiceClient(metaclass=TemplatesServiceClientMeta):
+    """Provides a method to create Cloud Dataflow jobs from
+    templates.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "dataflow.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TemplatesServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            TemplatesServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(
+            filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> TemplatesServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            TemplatesServiceTransport: The transport used by the client
+                instance.
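+
+        Example (an illustrative sketch, not generated output; assumes
+        Application Default Credentials are available in the environment):
+
+        .. code-block:: python
+
+            client = TemplatesServiceClient()
+            print(client.transport.kind)  # "grpc" unless another transport was chosen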
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
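+
+        Example (an illustrative sketch, not generated output; assumes
+        GOOGLE_API_USE_CLIENT_CERTIFICATE is unset, so it defaults to "false"):
+
+        .. code-block:: python
+
+            import os
+
+            os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
+            endpoint, cert_source = (
+                TemplatesServiceClient.get_mtls_endpoint_and_cert_source()
+            )
+            # endpoint == "dataflow.googleapis.com"; cert_source is None
+            # because no client certificate is in use.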
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TemplatesServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the templates service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TemplatesServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TemplatesServiceTransport): + # transport is a TemplatesServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_job_from_template(self, + request: Optional[Union[templates.CreateJobFromTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> jobs.Job: + r"""Creates a Cloud Dataflow job from a template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_create_job_from_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobFromTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.create_job_from_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest, dict]): + The request object. A request to create a Cloud Dataflow + job from a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.Job: + Defines a job to be run by the Cloud + Dataflow service. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a templates.CreateJobFromTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, templates.CreateJobFromTemplateRequest): + request = templates.CreateJobFromTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_from_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def launch_template(self, + request: Optional[Union[templates.LaunchTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.LaunchTemplateResponse: + r"""Launch a template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_launch_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.launch_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest, dict]): + The request object. A request to launch a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse: + Response to the request to launch a + template. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a templates.LaunchTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, templates.LaunchTemplateRequest): + request = templates.LaunchTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.launch_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
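+        # The tuple below is serialized into the ``x-goog-request-params``
+        # metadata entry, which lets the service route the call to the
+        # project and regional endpoint named in the request.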
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_template(self, + request: Optional[Union[templates.GetTemplateRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> templates.GetTemplateResponse: + r"""Get the template associated with a template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataflow_v1beta3 + + def sample_get_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.get_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataflow_v1beta3.types.GetTemplateRequest, dict]): + The request object. A request to retrieve a Cloud + Dataflow job template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataflow_v1beta3.types.GetTemplateResponse: + The response to a GetTemplate + request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a templates.GetTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, templates.GetTemplateRequest): + request = templates.GetTemplateRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project_id", request.project_id), + ("location", request.location), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
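+
+        A minimal usage sketch (illustrative; assumes default credentials and
+        a request built as in the method samples above):
+
+        .. code-block:: python
+
+            with TemplatesServiceClient() as client:
+                response = client.get_template(request=request)
+            # The transport is closed when the ``with`` block exits.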
+ """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TemplatesServiceClient", +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py new file mode 100644 index 0000000..7a56700 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TemplatesServiceTransport +from .grpc import TemplatesServiceGrpcTransport +from .grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport +from .rest import TemplatesServiceRestTransport +from .rest import TemplatesServiceRestInterceptor + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] +_transport_registry['grpc'] = TemplatesServiceGrpcTransport +_transport_registry['grpc_asyncio'] = TemplatesServiceGrpcAsyncIOTransport +_transport_registry['rest'] = TemplatesServiceRestTransport + +__all__ = ( + 'TemplatesServiceTransport', + 'TemplatesServiceGrpcTransport', + 'TemplatesServiceGrpcAsyncIOTransport', + 'TemplatesServiceRestTransport', + 'TemplatesServiceRestInterceptor', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py new file mode 100644 index 0000000..78fab81 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+
+from google.cloud.dataflow_v1beta3 import gapic_version as package_version
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import templates
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
+
+
+class TemplatesServiceTransport(abc.ABC):
+    """Abstract transport class for TemplatesService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+        'https://www.googleapis.com/auth/compute',
+        'https://www.googleapis.com/auth/compute.readonly',
+        'https://www.googleapis.com/auth/userinfo.email',
+    )
+
+    DEFAULT_HOST: str = 'dataflow.googleapis.com'
+
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+            # Don't apply the audience if the user passed a credentials file.
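+            # (``with_gdch_audience`` is provided by the Google Distributed
+            # Cloud Hosted credential types; for standard credentials the
+            # hasattr check below is False and the credentials pass through
+            # unchanged.)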
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_job_from_template: gapic_v1.method.wrap_method( + self.create_job_from_template, + default_timeout=None, + client_info=client_info, + ), + self.launch_template: gapic_v1.method.wrap_method( + self.launch_template, + default_timeout=None, + client_info=client_info, + ), + self.get_template: gapic_v1.method.wrap_method( + self.get_template, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_job_from_template(self) -> Callable[ + [templates.CreateJobFromTemplateRequest], + Union[ + jobs.Job, + Awaitable[jobs.Job] + ]]: + raise NotImplementedError() + + @property + def launch_template(self) -> Callable[ + [templates.LaunchTemplateRequest], + Union[ + templates.LaunchTemplateResponse, + Awaitable[templates.LaunchTemplateResponse] + ]]: + raise NotImplementedError() + + @property + def get_template(self) -> Callable[ + [templates.GetTemplateRequest], + Union[ + templates.GetTemplateResponse, + Awaitable[templates.GetTemplateResponse] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'TemplatesServiceTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py new file mode 100644 index 0000000..93faab5 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py @@ -0,0 +1,318 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import templates
+from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class TemplatesServiceGrpcTransport(TemplatesServiceTransport):
+    """gRPC backend transport for TemplatesService.
+
+    Provides a method to create Cloud Dataflow jobs from
+    templates.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+    _stubs: Dict[str, Callable]
+
+    def __init__(self, *,
+            host: str = 'dataflow.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[grpc.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_job_from_template(self) -> Callable[
+            [templates.CreateJobFromTemplateRequest],
+            jobs.Job]:
+        r"""Return a callable for the create job from template method over gRPC.
+
+        Creates a Cloud Dataflow job from a template.
+
+        Returns:
+            Callable[[~.CreateJobFromTemplateRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_job_from_template' not in self._stubs:
+            self._stubs['create_job_from_template'] = self.grpc_channel.unary_unary(
+                '/google.dataflow.v1beta3.TemplatesService/CreateJobFromTemplate',
+                request_serializer=templates.CreateJobFromTemplateRequest.serialize,
+                response_deserializer=jobs.Job.deserialize,
+            )
+        return self._stubs['create_job_from_template']
+
+    @property
+    def launch_template(self) -> Callable[
+            [templates.LaunchTemplateRequest],
+            templates.LaunchTemplateResponse]:
+        r"""Return a callable for the launch template method over gRPC.
+
+        Launch a template.
+
+        Returns:
+            Callable[[~.LaunchTemplateRequest],
+                    ~.LaunchTemplateResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'launch_template' not in self._stubs:
+            self._stubs['launch_template'] = self.grpc_channel.unary_unary(
+                '/google.dataflow.v1beta3.TemplatesService/LaunchTemplate',
+                request_serializer=templates.LaunchTemplateRequest.serialize,
+                response_deserializer=templates.LaunchTemplateResponse.deserialize,
+            )
+        return self._stubs['launch_template']
+
+    @property
+    def get_template(self) -> Callable[
+            [templates.GetTemplateRequest],
+            templates.GetTemplateResponse]:
+        r"""Return a callable for the get template method over gRPC.
+
+        Get the template associated with a template.
+
+        Returns:
+            Callable[[~.GetTemplateRequest],
+                    ~.GetTemplateResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
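+        # The stub is cached in self._stubs, so the channel's unary_unary
+        # factory runs at most once per transport instance.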
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_template' not in self._stubs: + self._stubs['get_template'] = self.grpc_channel.unary_unary( + '/google.dataflow.v1beta3.TemplatesService/GetTemplate', + request_serializer=templates.GetTemplateRequest.serialize, + response_deserializer=templates.GetTemplateResponse.deserialize, + ) + return self._stubs['get_template'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'TemplatesServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..d7b5758 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import TemplatesServiceGrpcTransport + + +class TemplatesServiceGrpcAsyncIOTransport(TemplatesServiceTransport): + """gRPC AsyncIO backend transport for TemplatesService. + + Provides a method to create Cloud Dataflow jobs from + templates. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'dataflow.googleapis.com',
+            credentials: Optional[ga_credentials.Credentials] = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: Optional[aio.Channel] = None,
+            api_mtls_endpoint: Optional[str] = None,
+            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            api_audience: Optional[str] = None,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job_from_template(self) -> Callable[ + [templates.CreateJobFromTemplateRequest], + Awaitable[jobs.Job]]: + r"""Return a callable for the create job from template method over gRPC. + + Creates a Cloud Dataflow job from a template. + + Returns: + Callable[[~.CreateJobFromTemplateRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'create_job_from_template' not in self._stubs:
+            self._stubs['create_job_from_template'] = self.grpc_channel.unary_unary(
+                '/google.dataflow.v1beta3.TemplatesService/CreateJobFromTemplate',
+                request_serializer=templates.CreateJobFromTemplateRequest.serialize,
+                response_deserializer=jobs.Job.deserialize,
+            )
+        return self._stubs['create_job_from_template']
+
+    @property
+    def launch_template(self) -> Callable[
+            [templates.LaunchTemplateRequest],
+            Awaitable[templates.LaunchTemplateResponse]]:
+        r"""Return a callable for the launch template method over gRPC.
+
+        Launch a template.
+
+        Returns:
+            Callable[[~.LaunchTemplateRequest],
+                    Awaitable[~.LaunchTemplateResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'launch_template' not in self._stubs:
+            self._stubs['launch_template'] = self.grpc_channel.unary_unary(
+                '/google.dataflow.v1beta3.TemplatesService/LaunchTemplate',
+                request_serializer=templates.LaunchTemplateRequest.serialize,
+                response_deserializer=templates.LaunchTemplateResponse.deserialize,
+            )
+        return self._stubs['launch_template']
+
+    @property
+    def get_template(self) -> Callable[
+            [templates.GetTemplateRequest],
+            Awaitable[templates.GetTemplateResponse]]:
+        r"""Return a callable for the get template method over gRPC.
+
+        Get the metadata associated with a template.
+
+        Returns:
+            Callable[[~.GetTemplateRequest],
+                    Awaitable[~.GetTemplateResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_template' not in self._stubs:
+            self._stubs['get_template'] = self.grpc_channel.unary_unary(
+                '/google.dataflow.v1beta3.TemplatesService/GetTemplate',
+                request_serializer=templates.GetTemplateRequest.serialize,
+                response_deserializer=templates.GetTemplateResponse.deserialize,
+            )
+        return self._stubs['get_template']
+
+    def close(self):
+        return self.grpc_channel.close()
+
+
+__all__ = (
+    'TemplatesServiceGrpcAsyncIOTransport',
+)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py
new file mode 100644
index 0000000..c704eb5
--- /dev/null
+++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py
@@ -0,0 +1,528 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates + +from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class TemplatesServiceRestInterceptor: + """Interceptor for TemplatesService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the TemplatesServiceRestTransport. + + .. code-block:: python + class MyCustomTemplatesServiceInterceptor(TemplatesServiceRestInterceptor): + def pre_create_job_from_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_job_from_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_template(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_launch_template(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_launch_template(self, response): + logging.log(f"Received response: {response}") + return response + + transport = TemplatesServiceRestTransport(interceptor=MyCustomTemplatesServiceInterceptor()) + client = TemplatesServiceClient(transport=transport) + + + """ + def pre_create_job_from_template(self, request: templates.CreateJobFromTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.CreateJobFromTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_job_from_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_create_job_from_template(self, response: jobs.Job) -> jobs.Job: + """Post-rpc interceptor for create_job_from_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. 
+ """ + return response + def pre_get_template(self, request: templates.GetTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.GetTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_get_template(self, response: templates.GetTemplateResponse) -> templates.GetTemplateResponse: + """Post-rpc interceptor for get_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. + """ + return response + def pre_launch_template(self, request: templates.LaunchTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.LaunchTemplateRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for launch_template + + Override in a subclass to manipulate the request or metadata + before they are sent to the TemplatesService server. + """ + return request, metadata + + def post_launch_template(self, response: templates.LaunchTemplateResponse) -> templates.LaunchTemplateResponse: + """Post-rpc interceptor for launch_template + + Override in a subclass to manipulate the response + after it is returned by the TemplatesService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class TemplatesServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: TemplatesServiceRestInterceptor + + +class TemplatesServiceRestTransport(TemplatesServiceTransport): + """REST backend transport for TemplatesService. + + Provides a method to create Cloud Dataflow jobs from + templates. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__(self, *, + host: str = 'dataflow.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[TemplatesServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or TemplatesServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateJobFromTemplate(TemplatesServiceRestStub):
+        def __hash__(self):
+            return hash("CreateJobFromTemplate")
+
+        def __call__(self,
+                request: templates.CreateJobFromTemplateRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: Optional[float]=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> jobs.Job:
+            r"""Call the create job from template method over HTTP.
+
+            Args:
+                request (~.templates.CreateJobFromTemplateRequest):
+                    The request object. A request to create a Cloud Dataflow
+                    job from a template.
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.jobs.Job:
+                    Defines a job to be run by the Cloud
+                    Dataflow service.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/templates', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/templates', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_job_from_template(request, metadata) + pb_request = templates.CreateJobFromTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = jobs.Job() + pb_resp = jobs.Job.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_job_from_template(resp) + return resp + + class _GetTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("GetTemplate") + + def __call__(self, + request: templates.GetTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> templates.GetTemplateResponse: + r"""Call the get template method over HTTP. + + Args: + request (~.templates.GetTemplateRequest): + The request object. A request to retrieve a Cloud + Dataflow job template. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.GetTemplateResponse: + The response to a GetTemplate + request. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/templates:get', + }, +{ + 'method': 'get', + 'uri': '/v1b3/projects/{project_id}/templates:get', + }, + ] + request, metadata = self._interceptor.pre_get_template(request, metadata) + pb_request = templates.GetTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.GetTemplateResponse() + pb_resp = templates.GetTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_template(resp) + return resp + + class _LaunchTemplate(TemplatesServiceRestStub): + def __hash__(self): + return hash("LaunchTemplate") + + def __call__(self, + request: templates.LaunchTemplateRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> templates.LaunchTemplateResponse: + r"""Call the launch template method over HTTP. + + Args: + request (~.templates.LaunchTemplateRequest): + The request object. A request to launch a template. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.templates.LaunchTemplateResponse: + Response to the request to launch a + template. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/locations/{location}/templates:launch', + 'body': 'launch_parameters', + }, +{ + 'method': 'post', + 'uri': '/v1b3/projects/{project_id}/templates:launch', + 'body': 'launch_parameters', + }, + ] + request, metadata = self._interceptor.pre_launch_template(request, metadata) + pb_request = templates.LaunchTemplateRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=True + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=True, + )) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = templates.LaunchTemplateResponse() + pb_resp = templates.LaunchTemplateResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_launch_template(resp) + return resp + + @property + def create_job_from_template(self) -> Callable[ + [templates.CreateJobFromTemplateRequest], + jobs.Job]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateJobFromTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_template(self) -> Callable[ + [templates.GetTemplateRequest], + templates.GetTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def launch_template(self) -> Callable[ + [templates.LaunchTemplateRequest], + templates.LaunchTemplateResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._LaunchTemplate(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'TemplatesServiceRestTransport', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py new file mode 100644 index 0000000..938458a --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py @@ -0,0 +1,242 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .environment import ( + AutoscalingSettings, + DebugOptions, + Disk, + Environment, + Package, + SdkHarnessContainerImage, + TaskRunnerSettings, + WorkerPool, + WorkerSettings, + AutoscalingAlgorithm, + DefaultPackageSet, + FlexResourceSchedulingGoal, + JobType, + ShuffleMode, + TeardownPolicy, + WorkerIPAddressConfiguration, +) +from .jobs import ( + BigQueryIODetails, + BigTableIODetails, + CheckActiveJobsRequest, + CheckActiveJobsResponse, + CreateJobRequest, + DatastoreIODetails, + DisplayData, + ExecutionStageState, + ExecutionStageSummary, + FailedLocation, + FileIODetails, + GetJobRequest, + Job, + JobExecutionInfo, + JobExecutionStageInfo, + JobMetadata, + ListJobsRequest, + ListJobsResponse, + PipelineDescription, + PubSubIODetails, + SdkVersion, + SnapshotJobRequest, + SpannerIODetails, + Step, + TransformSummary, + UpdateJobRequest, + JobState, + JobView, + KindType, +) +from .messages import ( + AutoscalingEvent, + JobMessage, + ListJobMessagesRequest, + ListJobMessagesResponse, + StructuredMessage, + JobMessageImportance, +) +from .metrics import ( + GetJobExecutionDetailsRequest, + GetJobMetricsRequest, + GetStageExecutionDetailsRequest, + JobExecutionDetails, + JobMetrics, + MetricStructuredName, + MetricUpdate, + ProgressTimeseries, + StageExecutionDetails, + StageSummary, + WorkerDetails, + WorkItemDetails, + ExecutionState, +) +from .snapshots import ( + DeleteSnapshotRequest, + DeleteSnapshotResponse, + GetSnapshotRequest, + ListSnapshotsRequest, + ListSnapshotsResponse, + PubsubSnapshotMetadata, + Snapshot, + SnapshotState, +) +from .streaming import ( + ComputationTopology, + CustomSourceLocation, + DataDiskAssignment, + KeyRangeDataDiskAssignment, + KeyRangeLocation, + MountedDataDisk, + PubsubLocation, + StateFamilyConfig, + StreamingApplianceSnapshotConfig, + StreamingComputationRanges, + StreamingSideInputLocation, + StreamingStageLocation, + StreamLocation, + TopologyConfig, +) +from .templates import ( + ContainerSpec, + CreateJobFromTemplateRequest, + DynamicTemplateLaunchParams, + FlexTemplateRuntimeEnvironment, + GetTemplateRequest, + GetTemplateResponse, + InvalidTemplateParameters, + LaunchFlexTemplateParameter, + LaunchFlexTemplateRequest, + LaunchFlexTemplateResponse, + LaunchTemplateParameters, + LaunchTemplateRequest, + LaunchTemplateResponse, + ParameterMetadata, + RuntimeEnvironment, + RuntimeMetadata, + SDKInfo, + TemplateMetadata, + ParameterType, +) + +__all__ = ( + 'AutoscalingSettings', + 'DebugOptions', + 'Disk', + 'Environment', + 'Package', + 'SdkHarnessContainerImage', + 'TaskRunnerSettings', + 'WorkerPool', + 'WorkerSettings', + 'AutoscalingAlgorithm', + 'DefaultPackageSet', + 'FlexResourceSchedulingGoal', + 'JobType', + 'ShuffleMode', + 'TeardownPolicy', + 'WorkerIPAddressConfiguration', + 'BigQueryIODetails', + 'BigTableIODetails', + 'CheckActiveJobsRequest', + 'CheckActiveJobsResponse', + 'CreateJobRequest', + 'DatastoreIODetails', + 'DisplayData', + 'ExecutionStageState', + 'ExecutionStageSummary', + 'FailedLocation', + 'FileIODetails', + 'GetJobRequest', + 'Job', + 'JobExecutionInfo', + 'JobExecutionStageInfo', + 
'JobMetadata', + 'ListJobsRequest', + 'ListJobsResponse', + 'PipelineDescription', + 'PubSubIODetails', + 'SdkVersion', + 'SnapshotJobRequest', + 'SpannerIODetails', + 'Step', + 'TransformSummary', + 'UpdateJobRequest', + 'JobState', + 'JobView', + 'KindType', + 'AutoscalingEvent', + 'JobMessage', + 'ListJobMessagesRequest', + 'ListJobMessagesResponse', + 'StructuredMessage', + 'JobMessageImportance', + 'GetJobExecutionDetailsRequest', + 'GetJobMetricsRequest', + 'GetStageExecutionDetailsRequest', + 'JobExecutionDetails', + 'JobMetrics', + 'MetricStructuredName', + 'MetricUpdate', + 'ProgressTimeseries', + 'StageExecutionDetails', + 'StageSummary', + 'WorkerDetails', + 'WorkItemDetails', + 'ExecutionState', + 'DeleteSnapshotRequest', + 'DeleteSnapshotResponse', + 'GetSnapshotRequest', + 'ListSnapshotsRequest', + 'ListSnapshotsResponse', + 'PubsubSnapshotMetadata', + 'Snapshot', + 'SnapshotState', + 'ComputationTopology', + 'CustomSourceLocation', + 'DataDiskAssignment', + 'KeyRangeDataDiskAssignment', + 'KeyRangeLocation', + 'MountedDataDisk', + 'PubsubLocation', + 'StateFamilyConfig', + 'StreamingApplianceSnapshotConfig', + 'StreamingComputationRanges', + 'StreamingSideInputLocation', + 'StreamingStageLocation', + 'StreamLocation', + 'TopologyConfig', + 'ContainerSpec', + 'CreateJobFromTemplateRequest', + 'DynamicTemplateLaunchParams', + 'FlexTemplateRuntimeEnvironment', + 'GetTemplateRequest', + 'GetTemplateResponse', + 'InvalidTemplateParameters', + 'LaunchFlexTemplateParameter', + 'LaunchFlexTemplateRequest', + 'LaunchFlexTemplateResponse', + 'LaunchTemplateParameters', + 'LaunchTemplateRequest', + 'LaunchTemplateResponse', + 'ParameterMetadata', + 'RuntimeEnvironment', + 'RuntimeMetadata', + 'SDKInfo', + 'TemplateMetadata', + 'ParameterType', +) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py new file mode 100644 index 0000000..5a494d7 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py @@ -0,0 +1,891 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import any_pb2  # type: ignore
+from google.protobuf import struct_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.dataflow.v1beta3',
+    manifest={
+        'JobType',
+        'FlexResourceSchedulingGoal',
+        'TeardownPolicy',
+        'DefaultPackageSet',
+        'AutoscalingAlgorithm',
+        'WorkerIPAddressConfiguration',
+        'ShuffleMode',
+        'Environment',
+        'Package',
+        'Disk',
+        'WorkerSettings',
+        'TaskRunnerSettings',
+        'AutoscalingSettings',
+        'SdkHarnessContainerImage',
+        'WorkerPool',
+        'DebugOptions',
+    },
+)
+
+
+class JobType(proto.Enum):
+    r"""Specifies the processing model used by a
+    [google.dataflow.v1beta3.Job], which determines the way the Job is
+    managed by the Cloud Dataflow service (how workers are scheduled,
+    how inputs are sharded, etc.).
+    """
+    JOB_TYPE_UNKNOWN = 0
+    JOB_TYPE_BATCH = 1
+    JOB_TYPE_STREAMING = 2
+
+
+class FlexResourceSchedulingGoal(proto.Enum):
+    r"""Specifies the resource to optimize for in Flexible Resource
+    Scheduling.
+    """
+    FLEXRS_UNSPECIFIED = 0
+    FLEXRS_SPEED_OPTIMIZED = 1
+    FLEXRS_COST_OPTIMIZED = 2
+
+
+class TeardownPolicy(proto.Enum):
+    r"""Specifies what happens to a resource when a Cloud Dataflow
+    [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job] has
+    completed.
+    """
+    TEARDOWN_POLICY_UNKNOWN = 0
+    TEARDOWN_ALWAYS = 1
+    TEARDOWN_ON_SUCCESS = 2
+    TEARDOWN_NEVER = 3
+
+
+class DefaultPackageSet(proto.Enum):
+    r"""The default set of packages to be staged on a pool of
+    workers.
+    """
+    DEFAULT_PACKAGE_SET_UNKNOWN = 0
+    DEFAULT_PACKAGE_SET_NONE = 1
+    DEFAULT_PACKAGE_SET_JAVA = 2
+    DEFAULT_PACKAGE_SET_PYTHON = 3
+
+
+class AutoscalingAlgorithm(proto.Enum):
+    r"""Specifies the algorithm used to determine the number of
+    worker processes to run at any given point in time, based on the
+    amount of data left to process, the number of workers, and how
+    quickly existing workers are processing data.
+    """
+    AUTOSCALING_ALGORITHM_UNKNOWN = 0
+    AUTOSCALING_ALGORITHM_NONE = 1
+    AUTOSCALING_ALGORITHM_BASIC = 2
+
+
+class WorkerIPAddressConfiguration(proto.Enum):
+    r"""Specifies how IP addresses should be allocated to the worker
+    machines.
+    """
+    WORKER_IP_UNSPECIFIED = 0
+    WORKER_IP_PUBLIC = 1
+    WORKER_IP_PRIVATE = 2
+
+
+class ShuffleMode(proto.Enum):
+    r"""Specifies the shuffle mode used by a [google.dataflow.v1beta3.Job],
+    which determines how data is shuffled during processing.
+    More details in:
+    https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#dataflow-shuffle
+    """
+    SHUFFLE_MODE_UNSPECIFIED = 0
+    VM_BASED = 1
+    SERVICE_BASED = 2
+
+
+class Environment(proto.Message):
+    r"""Describes the environment in which a Dataflow Job runs.
+
+    Attributes:
+        temp_storage_prefix (str):
+            The prefix of the resources the system should use for
+            temporary storage. The system will append the suffix
+            "/temp-{JOBNAME}" to this resource prefix, where {JOBNAME} is
+            the value of the job_name field. The resulting bucket and
+            object prefix is used as the prefix of the resources used to
+            store temporary data needed during the job execution. NOTE:
+            This will override the value in taskrunner_settings. The
+            supported resource type is:
+
+            Google Cloud Storage:
+
+            storage.googleapis.com/{bucket}/{object}
+            bucket.storage.googleapis.com/{object}
+        cluster_manager_api_service (str):
+            The type of cluster manager API to use. If
+            unknown or unspecified, the service will attempt
+            to choose a reasonable default.
This should be + in the form of the API service name, e.g. + "compute.googleapis.com". + experiments (MutableSequence[str]): + The list of experiments to enable. This field should be used + for SDK related experiments and not for service related + experiments. The proper field for service related + experiments is service_options. + service_options (MutableSequence[str]): + The list of service options to enable. This + field should be used for service related + experiments only. These experiments, when + graduating to GA, should be replaced by + dedicated fields or become default (i.e. always + on). + service_kms_key_name (str): + If set, contains the Cloud KMS key identifier used to + encrypt data at rest, AKA a Customer Managed Encryption Key + (CMEK). + + Format: + projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY + worker_pools (MutableSequence[google.cloud.dataflow_v1beta3.types.WorkerPool]): + The worker pools. At least one "harness" + worker pool must be specified in order for the + job to have workers. + user_agent (google.protobuf.struct_pb2.Struct): + A description of the process that generated + the request. + version (google.protobuf.struct_pb2.Struct): + A structure describing which components and + their versions of the service are required in + order to run the job. + dataset (str): + The dataset for the current project where + various workflow related tables are stored. + + The supported resource type is: + + Google BigQuery: + bigquery.googleapis.com/{dataset} + sdk_pipeline_options (google.protobuf.struct_pb2.Struct): + The Cloud Dataflow SDK pipeline options + specified by the user. These options are passed + through the service and are used to recreate the + SDK pipeline options on the worker in a language + agnostic and platform independent way. + internal_experiments (google.protobuf.any_pb2.Any): + Experimental settings. + service_account_email (str): + Identity to run virtual machines as. Defaults + to the default account. + flex_resource_scheduling_goal (google.cloud.dataflow_v1beta3.types.FlexResourceSchedulingGoal): + Which Flexible Resource Scheduling mode to + run in. + worker_region (str): + The Compute Engine region + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1". + Mutually exclusive with worker_zone. If neither + worker_region nor worker_zone is specified, default to the + control plane's region. + worker_zone (str): + The Compute Engine zone + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1-a". + Mutually exclusive with worker_region. If neither + worker_region nor worker_zone is specified, a zone in the + control plane's region is chosen based on available + capacity. + shuffle_mode (google.cloud.dataflow_v1beta3.types.ShuffleMode): + Output only. The shuffle mode used for the + job. + debug_options (google.cloud.dataflow_v1beta3.types.DebugOptions): + Any debugging options to be supplied to the + job. 
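+
+    A minimal construction sketch (field values are illustrative
+    placeholders, not defaults):
+
+    .. code-block:: python
+
+        from google.cloud import dataflow_v1beta3
+
+        environment = dataflow_v1beta3.Environment(
+            temp_storage_prefix="storage.googleapis.com/my-bucket/temp",
+            service_account_email="worker@my-project.iam.gserviceaccount.com",
+            worker_region="us-central1",
+        )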
+ """ + + temp_storage_prefix: str = proto.Field( + proto.STRING, + number=1, + ) + cluster_manager_api_service: str = proto.Field( + proto.STRING, + number=2, + ) + experiments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + service_options: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + service_kms_key_name: str = proto.Field( + proto.STRING, + number=12, + ) + worker_pools: MutableSequence['WorkerPool'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='WorkerPool', + ) + user_agent: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + version: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + dataset: str = proto.Field( + proto.STRING, + number=7, + ) + sdk_pipeline_options: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=8, + message=struct_pb2.Struct, + ) + internal_experiments: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=9, + message=any_pb2.Any, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=10, + ) + flex_resource_scheduling_goal: 'FlexResourceSchedulingGoal' = proto.Field( + proto.ENUM, + number=11, + enum='FlexResourceSchedulingGoal', + ) + worker_region: str = proto.Field( + proto.STRING, + number=13, + ) + worker_zone: str = proto.Field( + proto.STRING, + number=14, + ) + shuffle_mode: 'ShuffleMode' = proto.Field( + proto.ENUM, + number=15, + enum='ShuffleMode', + ) + debug_options: 'DebugOptions' = proto.Field( + proto.MESSAGE, + number=17, + message='DebugOptions', + ) + + +class Package(proto.Message): + r"""The packages that must be installed in order for a worker to + run the steps of the Cloud Dataflow job that will be assigned to + its worker pool. + + This is the mechanism by which the Cloud Dataflow SDK causes + code to be loaded onto the workers. For example, the Cloud + Dataflow Java SDK might use this to install jars containing the + user's code and all of the various dependencies (libraries, data + files, etc.) required in order for that code to run. + + Attributes: + name (str): + The name of the package. + location (str): + The resource to read the package from. The + supported resource type is: + Google Cloud Storage: + + storage.googleapis.com/{bucket} + bucket.storage.googleapis.com/ + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Disk(proto.Message): + r"""Describes the data disk used by a workflow job. + + Attributes: + size_gb (int): + Size of disk in GB. If zero or unspecified, + the service will attempt to choose a reasonable + default. + disk_type (str): + Disk storage type, as defined by Google + Compute Engine. This must be a disk type + appropriate to the project and zone in which the + workers will run. If unknown or unspecified, + the service will attempt to choose a reasonable + default. + + For example, the standard persistent disk type + is a resource name typically ending in + "pd-standard". If SSD persistent disks are + available, the resource name typically ends with + "pd-ssd". The actual valid values are defined + the Google Compute Engine API, not by the Cloud + Dataflow API; consult the Google Compute Engine + documentation for more information about + determining the set of available disk types for + a particular project and zone. 
+ Google Compute Engine Disk types are local to a + particular project in a particular zone, and so + the resource name will typically look something + like this: + + compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard + mount_point (str): + Directory in a VM where disk is mounted. + """ + + size_gb: int = proto.Field( + proto.INT32, + number=1, + ) + disk_type: str = proto.Field( + proto.STRING, + number=2, + ) + mount_point: str = proto.Field( + proto.STRING, + number=3, + ) + + +class WorkerSettings(proto.Message): + r"""Provides data to pass through to the worker harness. + + Attributes: + base_url (str): + The base URL for accessing Google Cloud APIs. + When workers access Google Cloud APIs, they + logically do so via relative URLs. If this + field is specified, it supplies the base URL to + use for resolving these relative URLs. The + normative algorithm used is defined by RFC 1808, + "Relative Uniform Resource Locators". + + If not specified, the default value is + "http://www.googleapis.com/". + reporting_enabled (bool): + Whether to send work progress updates to the + service. + service_path (str): + The Cloud Dataflow service path relative to + the root URL, for example, + "dataflow/v1b3/projects". + shuffle_service_path (str): + The Shuffle service path relative to the root + URL, for example, "shuffle/v1beta1". + worker_id (str): + The ID of the worker running this pipeline. + temp_storage_prefix (str): + The prefix of the resources the system should + use for temporary storage. + + The supported resource type is: + + Google Cloud Storage: + + storage.googleapis.com/{bucket}/{object} + bucket.storage.googleapis.com/{object} + """ + + base_url: str = proto.Field( + proto.STRING, + number=1, + ) + reporting_enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + service_path: str = proto.Field( + proto.STRING, + number=3, + ) + shuffle_service_path: str = proto.Field( + proto.STRING, + number=4, + ) + worker_id: str = proto.Field( + proto.STRING, + number=5, + ) + temp_storage_prefix: str = proto.Field( + proto.STRING, + number=6, + ) + + +class TaskRunnerSettings(proto.Message): + r"""Taskrunner configuration settings. + + Attributes: + task_user (str): + The UNIX user ID on the worker VM to use for + tasks launched by taskrunner; e.g. "root". + task_group (str): + The UNIX group ID on the worker VM to use for + tasks launched by taskrunner; e.g. "wheel". + oauth_scopes (MutableSequence[str]): + The OAuth2 scopes to be requested by the + taskrunner in order to access the Cloud Dataflow + API. + base_url (str): + The base URL for the taskrunner to use when + accessing Google Cloud APIs. + When workers access Google Cloud APIs, they + logically do so via relative URLs. If this + field is specified, it supplies the base URL to + use for resolving these relative URLs. The + normative algorithm used is defined by RFC 1808, + "Relative Uniform Resource Locators". + + If not specified, the default value is + "http://www.googleapis.com/". + dataflow_api_version (str): + The API version of endpoint, e.g. "v1b3". + parallel_worker_settings (google.cloud.dataflow_v1beta3.types.WorkerSettings): + The settings to pass to the parallel worker + harness. + base_task_dir (str): + The location on the worker for task-specific + subdirectories. + continue_on_exception (bool): + Whether to continue taskrunner if an + exception is hit. + log_to_serialconsole (bool): + Whether to send taskrunner log info to Google + Compute Engine VM serial console. 
+        alsologtostderr (bool):
+            Whether to also send taskrunner log info to
+            stderr.
+        log_upload_location (str):
+            Indicates where to put logs. If this is not
+            specified, the logs will not be uploaded.
+
+            The supported resource type is:
+
+            Google Cloud Storage:
+            storage.googleapis.com/{bucket}/{object}
+            bucket.storage.googleapis.com/{object}
+        log_dir (str):
+            The directory on the VM to store logs.
+        temp_storage_prefix (str):
+            The prefix of the resources the taskrunner
+            should use for temporary storage.
+
+            The supported resource type is:
+
+            Google Cloud Storage:
+            storage.googleapis.com/{bucket}/{object}
+            bucket.storage.googleapis.com/{object}
+        harness_command (str):
+            The command to launch the worker harness.
+        workflow_file_name (str):
+            The file to store the workflow in.
+        commandlines_file_name (str):
+            The file to store preprocessing commands in.
+        vm_id (str):
+            The ID string of the VM.
+        language_hint (str):
+            The suggested backend language.
+        streaming_worker_main_class (str):
+            The streaming worker main class name.
+    """
+
+    task_user: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    task_group: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    oauth_scopes: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+    base_url: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    dataflow_api_version: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    parallel_worker_settings: 'WorkerSettings' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        message='WorkerSettings',
+    )
+    base_task_dir: str = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+    continue_on_exception: bool = proto.Field(
+        proto.BOOL,
+        number=8,
+    )
+    log_to_serialconsole: bool = proto.Field(
+        proto.BOOL,
+        number=9,
+    )
+    alsologtostderr: bool = proto.Field(
+        proto.BOOL,
+        number=10,
+    )
+    log_upload_location: str = proto.Field(
+        proto.STRING,
+        number=11,
+    )
+    log_dir: str = proto.Field(
+        proto.STRING,
+        number=12,
+    )
+    temp_storage_prefix: str = proto.Field(
+        proto.STRING,
+        number=13,
+    )
+    harness_command: str = proto.Field(
+        proto.STRING,
+        number=14,
+    )
+    workflow_file_name: str = proto.Field(
+        proto.STRING,
+        number=15,
+    )
+    commandlines_file_name: str = proto.Field(
+        proto.STRING,
+        number=16,
+    )
+    vm_id: str = proto.Field(
+        proto.STRING,
+        number=17,
+    )
+    language_hint: str = proto.Field(
+        proto.STRING,
+        number=18,
+    )
+    streaming_worker_main_class: str = proto.Field(
+        proto.STRING,
+        number=19,
+    )
+
+
+class AutoscalingSettings(proto.Message):
+    r"""Settings for WorkerPool autoscaling.
+
+    Attributes:
+        algorithm (google.cloud.dataflow_v1beta3.types.AutoscalingAlgorithm):
+            The algorithm to use for autoscaling.
+        max_num_workers (int):
+            The maximum number of workers to cap scaling
+            at.
+    """
+
+    algorithm: 'AutoscalingAlgorithm' = proto.Field(
+        proto.ENUM,
+        number=1,
+        enum='AutoscalingAlgorithm',
+    )
+    max_num_workers: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+
+
+class SdkHarnessContainerImage(proto.Message):
+    r"""Defines an SDK harness container for executing Dataflow
+    pipelines.
+
+    Attributes:
+        container_image (str):
+            A docker container image that resides in
+            Google Container Registry.
+        use_single_core_per_container (bool):
+            If true, recommends the Dataflow service to
+            use only one core per SDK container instance
+            with this image. If false (or unset), recommends
+            using more than one core per SDK container
+            instance with this image for efficiency. Note
+            that the Dataflow service may choose to override
+            this property if needed.
+        environment_id (str):
+            Environment ID for the Beam runner API proto
+            Environment that corresponds to the current SDK
+            Harness.
+        capabilities (MutableSequence[str]):
+            The set of capabilities enumerated in the above Environment
+            proto. See also
+            https://github.com/apache/beam/blob/master/model/pipeline/src/main/proto/beam_runner_api.proto
+    """
+
+    container_image: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    use_single_core_per_container: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    environment_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    capabilities: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=4,
+    )
+
+
+class WorkerPool(proto.Message):
+    r"""Describes one particular pool of Cloud Dataflow workers to be
+    instantiated by the Cloud Dataflow service in order to perform
+    the computations required by a job. Note that a workflow job
+    may use multiple pools, in order to match the various
+    computational requirements of the various stages of the job.
+
+    Attributes:
+        kind (str):
+            The kind of the worker pool; currently only ``harness`` and
+            ``shuffle`` are supported.
+        num_workers (int):
+            Number of Google Compute Engine workers in
+            this pool needed to execute the job. If zero or
+            unspecified, the service will attempt to choose
+            a reasonable default.
+        packages (MutableSequence[google.cloud.dataflow_v1beta3.types.Package]):
+            Packages to be installed on workers.
+        default_package_set (google.cloud.dataflow_v1beta3.types.DefaultPackageSet):
+            The default package set to install. This
+            allows the service to select a default set of
+            packages which are useful to worker harnesses
+            written in a particular language.
+        machine_type (str):
+            Machine type (e.g. "n1-standard-1"). If
+            empty or unspecified, the service will attempt
+            to choose a reasonable default.
+        teardown_policy (google.cloud.dataflow_v1beta3.types.TeardownPolicy):
+            Sets the policy for determining when to turn down the
+            worker pool. Allowed values are: ``TEARDOWN_ALWAYS``,
+            ``TEARDOWN_ON_SUCCESS``, and ``TEARDOWN_NEVER``.
+            ``TEARDOWN_ALWAYS`` means workers are always torn down
+            regardless of whether the job succeeds.
+            ``TEARDOWN_ON_SUCCESS`` means workers are torn down if the
+            job succeeds. ``TEARDOWN_NEVER`` means the workers are never
+            torn down.
+
+            If the workers are not torn down by the service, they will
+            continue to run and use Google Compute Engine VM resources
+            in the user's project until they are explicitly terminated
+            by the user. Because of this, Google recommends using the
+            ``TEARDOWN_ALWAYS`` policy except for small, manually
+            supervised test jobs.
+
+            If unknown or unspecified, the service will attempt to
+            choose a reasonable default.
+        disk_size_gb (int):
+            Size of root disk for VMs, in GB. If zero or
+            unspecified, the service will attempt to choose
+            a reasonable default.
+        disk_type (str):
+            Type of root disk for VMs. If empty or
+            unspecified, the service will attempt to choose
+            a reasonable default.
+        disk_source_image (str):
+            Fully qualified source image for disks.
+        zone (str):
+            Zone to run the worker pools in. If empty or
+            unspecified, the service will attempt to choose
+            a reasonable default.
+        taskrunner_settings (google.cloud.dataflow_v1beta3.types.TaskRunnerSettings):
+            Settings passed through to Google Compute
+            Engine workers when using the standard Dataflow
+            task runner. Users should ignore this field.
+        on_host_maintenance (str):
+            The action to take on host maintenance, as
+            defined by the Google Compute Engine API.
+ data_disks (MutableSequence[google.cloud.dataflow_v1beta3.types.Disk]): + Data disks that are used by a VM in this + workflow. + metadata (MutableMapping[str, str]): + Metadata to set on the Google Compute Engine + VMs. + autoscaling_settings (google.cloud.dataflow_v1beta3.types.AutoscalingSettings): + Settings for autoscaling of this WorkerPool. + pool_args (google.protobuf.any_pb2.Any): + Extra arguments for this worker pool. + network (str): + Network to which VMs will be assigned. If + empty or unspecified, the service will use the + network "default". + subnetwork (str): + Subnetwork to which VMs will be assigned, if + desired. Expected to be of the form + "regions/REGION/subnetworks/SUBNETWORK". + worker_harness_container_image (str): + Required. Docker container image that executes the Cloud + Dataflow worker harness, residing in Google Container + Registry. + + Deprecated for the Fn API path. Use + sdk_harness_container_images instead. + num_threads_per_worker (int): + The number of threads per worker harness. If + empty or unspecified, the service will choose a + number of threads (according to the number of + cores on the selected machine type for batch, or + 1 by convention for streaming). + ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): + Configuration for VM IPs. + sdk_harness_container_images (MutableSequence[google.cloud.dataflow_v1beta3.types.SdkHarnessContainerImage]): + Set of SDK harness containers needed to + execute this pipeline. This will only be set in + the Fn API path. For non-cross-language + pipelines this should have only one entry. + Cross-language pipelines will have two or more + entries. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + num_workers: int = proto.Field( + proto.INT32, + number=2, + ) + packages: MutableSequence['Package'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Package', + ) + default_package_set: 'DefaultPackageSet' = proto.Field( + proto.ENUM, + number=4, + enum='DefaultPackageSet', + ) + machine_type: str = proto.Field( + proto.STRING, + number=5, + ) + teardown_policy: 'TeardownPolicy' = proto.Field( + proto.ENUM, + number=6, + enum='TeardownPolicy', + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=7, + ) + disk_type: str = proto.Field( + proto.STRING, + number=16, + ) + disk_source_image: str = proto.Field( + proto.STRING, + number=8, + ) + zone: str = proto.Field( + proto.STRING, + number=9, + ) + taskrunner_settings: 'TaskRunnerSettings' = proto.Field( + proto.MESSAGE, + number=10, + message='TaskRunnerSettings', + ) + on_host_maintenance: str = proto.Field( + proto.STRING, + number=11, + ) + data_disks: MutableSequence['Disk'] = proto.RepeatedField( + proto.MESSAGE, + number=12, + message='Disk', + ) + metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + autoscaling_settings: 'AutoscalingSettings' = proto.Field( + proto.MESSAGE, + number=14, + message='AutoscalingSettings', + ) + pool_args: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=15, + message=any_pb2.Any, + ) + network: str = proto.Field( + proto.STRING, + number=17, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=19, + ) + worker_harness_container_image: str = proto.Field( + proto.STRING, + number=18, + ) + num_threads_per_worker: int = proto.Field( + proto.INT32, + number=20, + ) + ip_configuration: 'WorkerIPAddressConfiguration' = proto.Field( + proto.ENUM, + number=21, + enum='WorkerIPAddressConfiguration', + 
) + sdk_harness_container_images: MutableSequence['SdkHarnessContainerImage'] = proto.RepeatedField( + proto.MESSAGE, + number=22, + message='SdkHarnessContainerImage', + ) + + +class DebugOptions(proto.Message): + r"""Describes any options that have an effect on the debugging of + pipelines. + + Attributes: + enable_hot_key_logging (bool): + When true, enables the logging of the literal + hot key to the user's Cloud Logging. + """ + + enable_hot_key_logging: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py new file mode 100644 index 0000000..89be109 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py @@ -0,0 +1,1425 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment as gd_environment +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.dataflow.v1beta3', + manifest={ + 'KindType', + 'JobState', + 'JobView', + 'Job', + 'DatastoreIODetails', + 'PubSubIODetails', + 'FileIODetails', + 'BigTableIODetails', + 'BigQueryIODetails', + 'SpannerIODetails', + 'SdkVersion', + 'JobMetadata', + 'ExecutionStageState', + 'PipelineDescription', + 'TransformSummary', + 'ExecutionStageSummary', + 'DisplayData', + 'Step', + 'JobExecutionInfo', + 'JobExecutionStageInfo', + 'CreateJobRequest', + 'GetJobRequest', + 'UpdateJobRequest', + 'ListJobsRequest', + 'FailedLocation', + 'ListJobsResponse', + 'SnapshotJobRequest', + 'CheckActiveJobsRequest', + 'CheckActiveJobsResponse', + }, +) + + +class KindType(proto.Enum): + r"""Type of transform or stage operation.""" + UNKNOWN_KIND = 0 + PAR_DO_KIND = 1 + GROUP_BY_KEY_KIND = 2 + FLATTEN_KIND = 3 + READ_KIND = 4 + WRITE_KIND = 5 + CONSTANT_KIND = 6 + SINGLETON_KIND = 7 + SHUFFLE_KIND = 8 + + +class JobState(proto.Enum): + r"""Describes the overall state of a + [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job]. + """ + JOB_STATE_UNKNOWN = 0 + JOB_STATE_STOPPED = 1 + JOB_STATE_RUNNING = 2 + JOB_STATE_DONE = 3 + JOB_STATE_FAILED = 4 + JOB_STATE_CANCELLED = 5 + JOB_STATE_UPDATED = 6 + JOB_STATE_DRAINING = 7 + JOB_STATE_DRAINED = 8 + JOB_STATE_PENDING = 9 + JOB_STATE_CANCELLING = 10 + JOB_STATE_QUEUED = 11 + JOB_STATE_RESOURCE_CLEANING_UP = 12 + + +class JobView(proto.Enum): + r"""Selector for how much information is returned in Job + responses. + """ + JOB_VIEW_UNKNOWN = 0 + JOB_VIEW_SUMMARY = 1 + JOB_VIEW_ALL = 2 + JOB_VIEW_DESCRIPTION = 3 + + +class Job(proto.Message): + r"""Defines a job to be run by the Cloud Dataflow service. 
+ + Attributes: + id (str): + The unique ID of this job. + This field is set by the Cloud Dataflow service + when the Job is created, and is immutable for + the life of the job. + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + name (str): + The user-specified Cloud Dataflow job name. + + Only one Job with a given name may exist in a project at any + given time. If a caller attempts to create a Job with the + same name as an already-existing Job, the attempt returns + the existing Job. + + The name must match the regular expression + ``[a-z]([-a-z0-9]{0,1022}[a-z0-9])?`` + type_ (google.cloud.dataflow_v1beta3.types.JobType): + The type of Cloud Dataflow job. + environment (google.cloud.dataflow_v1beta3.types.Environment): + The environment for the job. + steps (MutableSequence[google.cloud.dataflow_v1beta3.types.Step]): + Exactly one of step or steps_location should be specified. + + The top-level steps that constitute the entire job. Only + retrieved with JOB_VIEW_ALL. + steps_location (str): + The Cloud Storage location where the steps + are stored. + current_state (google.cloud.dataflow_v1beta3.types.JobState): + The current state of the job. + + Jobs are created in the ``JOB_STATE_STOPPED`` state unless + otherwise specified. + + A job in the ``JOB_STATE_RUNNING`` state may asynchronously + enter a terminal state. After a job has reached a terminal + state, no further state updates may be made. + + This field may be mutated by the Cloud Dataflow service; + callers cannot mutate it. + current_state_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp associated with the current + state. + requested_state (google.cloud.dataflow_v1beta3.types.JobState): + The job's requested state. + + ``UpdateJob`` may be used to switch between the + ``JOB_STATE_STOPPED`` and ``JOB_STATE_RUNNING`` states, by + setting requested_state. ``UpdateJob`` may also be used to + directly set a job's requested state to + ``JOB_STATE_CANCELLED`` or ``JOB_STATE_DONE``, irrevocably + terminating the job if it has not already reached a terminal + state. + execution_info (google.cloud.dataflow_v1beta3.types.JobExecutionInfo): + Deprecated. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the job was initially + created. Immutable and set by the Cloud Dataflow + service. + replace_job_id (str): + If this job is an update of an existing job, this field is + the job ID of the job it replaced. + + When sending a ``CreateJobRequest``, you can update a job by + specifying it here. The job named here is stopped, and its + intermediate state is transferred to this job. + transform_name_mapping (MutableMapping[str, str]): + The map of transform name prefixes of the job + to be replaced to the corresponding name + prefixes of the new job. + client_request_id (str): + The client's unique identifier of the job, + re-used across retried attempts. If this field + is set, the service will ensure its uniqueness. + The request to create a job will fail if the + service has knowledge of a previously submitted + job with the same client's ID and job name. The + caller may use this field to ensure idempotence + of job creation across retried attempts to + create a job. By default, the field is empty + and, in that case, the service ignores it. + replaced_by_job_id (str): + If another job is an update of this job (and thus, this job + is in ``JOB_STATE_UPDATED``), this field contains the ID of + that job. 
+        temp_files (MutableSequence[str]):
+            A set of files the system should be aware of
+            that are used for temporary storage. These
+            temporary files will be removed on job
+            completion.
+            No duplicates are allowed.
+            No file patterns are supported.
+
+            The supported files are:
+
+            Google Cloud Storage:
+
+               storage.googleapis.com/{bucket}/{object}
+               bucket.storage.googleapis.com/{object}
+        labels (MutableMapping[str, str]):
+            User-defined labels for this job.
+
+            The labels map can contain no more than 64 entries. Entries
+            of the labels map are UTF8 strings that comply with the
+            following restrictions:
+
+            -  Keys must conform to regexp:
+               [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}
+            -  Values must conform to regexp:
+               [\p{Ll}\p{Lo}\p{N}_-]{0,63}
+            -  Both keys and values are additionally constrained to be
+               <= 128 bytes in size.
+        location (str):
+            The [regional endpoint]
+            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+            that contains this job.
+        pipeline_description (google.cloud.dataflow_v1beta3.types.PipelineDescription):
+            Preliminary field: The format of this data may change at any
+            time. A description of the user pipeline and stages through
+            which it is executed. Created by Cloud Dataflow service. Only
+            retrieved with JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL.
+        stage_states (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageState]):
+            This field may be mutated by the Cloud
+            Dataflow service; callers cannot mutate it.
+        job_metadata (google.cloud.dataflow_v1beta3.types.JobMetadata):
+            This field is populated by the Dataflow
+            service to support filtering jobs by the
+            metadata values provided here. Populated for
+            ListJobs and all GetJob views SUMMARY and
+            higher.
+        start_time (google.protobuf.timestamp_pb2.Timestamp):
+            The timestamp when the job was started (transitioned to
+            JOB_STATE_PENDING). Flexible resource scheduling jobs are
+            started with some delay after job creation, so start_time is
+            unset before start and is updated when the job is started by
+            the Cloud Dataflow service. For other jobs, start_time always
+            equals create_time and is immutable and set by the Cloud
+            Dataflow service.
+        created_from_snapshot_id (str):
+            If this is specified, the job's initial state
+            is populated from the given snapshot.
+        satisfies_pzs (bool):
+            Reserved for future use. This field is set
+            only in responses from the server; it is ignored
+            if it is set in any requests.
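+
+    As a sketch of how ``requested_state`` is used in practice (the IDs
+    and location below are placeholder assumptions), a running job can
+    be cancelled through the generated ``JobsV1Beta3Client``::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.JobsV1Beta3Client()
+        client.update_job(
+            request=dataflow_v1beta3.UpdateJobRequest(
+                project_id="my-project",
+                job_id="my-job-id",
+                location="us-central1",
+                job=dataflow_v1beta3.Job(
+                    requested_state=dataflow_v1beta3.JobState.JOB_STATE_CANCELLED,
+                ),
+            )
+        )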
+ """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=3, + ) + type_: gd_environment.JobType = proto.Field( + proto.ENUM, + number=4, + enum=gd_environment.JobType, + ) + environment: gd_environment.Environment = proto.Field( + proto.MESSAGE, + number=5, + message=gd_environment.Environment, + ) + steps: MutableSequence['Step'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='Step', + ) + steps_location: str = proto.Field( + proto.STRING, + number=24, + ) + current_state: 'JobState' = proto.Field( + proto.ENUM, + number=7, + enum='JobState', + ) + current_state_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + requested_state: 'JobState' = proto.Field( + proto.ENUM, + number=9, + enum='JobState', + ) + execution_info: 'JobExecutionInfo' = proto.Field( + proto.MESSAGE, + number=10, + message='JobExecutionInfo', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + replace_job_id: str = proto.Field( + proto.STRING, + number=12, + ) + transform_name_mapping: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=13, + ) + client_request_id: str = proto.Field( + proto.STRING, + number=14, + ) + replaced_by_job_id: str = proto.Field( + proto.STRING, + number=15, + ) + temp_files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=16, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=17, + ) + location: str = proto.Field( + proto.STRING, + number=18, + ) + pipeline_description: 'PipelineDescription' = proto.Field( + proto.MESSAGE, + number=19, + message='PipelineDescription', + ) + stage_states: MutableSequence['ExecutionStageState'] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message='ExecutionStageState', + ) + job_metadata: 'JobMetadata' = proto.Field( + proto.MESSAGE, + number=21, + message='JobMetadata', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=22, + message=timestamp_pb2.Timestamp, + ) + created_from_snapshot_id: str = proto.Field( + proto.STRING, + number=23, + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=25, + ) + + +class DatastoreIODetails(proto.Message): + r"""Metadata for a Datastore connector used by the job. + + Attributes: + namespace (str): + Namespace used in the connection. + project_id (str): + ProjectId accessed in the connection. + """ + + namespace: str = proto.Field( + proto.STRING, + number=1, + ) + project_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class PubSubIODetails(proto.Message): + r"""Metadata for a Pub/Sub connector used by the job. + + Attributes: + topic (str): + Topic accessed in the connection. + subscription (str): + Subscription used in the connection. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + subscription: str = proto.Field( + proto.STRING, + number=2, + ) + + +class FileIODetails(proto.Message): + r"""Metadata for a File connector used by the job. + + Attributes: + file_pattern (str): + File Pattern used to access files by the + connector. + """ + + file_pattern: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BigTableIODetails(proto.Message): + r"""Metadata for a Cloud Bigtable connector used by the job. 
+ + Attributes: + project_id (str): + ProjectId accessed in the connection. + instance_id (str): + InstanceId accessed in the connection. + table_id (str): + TableId accessed in the connection. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class BigQueryIODetails(proto.Message): + r"""Metadata for a BigQuery connector used by the job. + + Attributes: + table (str): + Table accessed in the connection. + dataset (str): + Dataset accessed in the connection. + project_id (str): + Project accessed in the connection. + query (str): + Query used to access data in the connection. + """ + + table: str = proto.Field( + proto.STRING, + number=1, + ) + dataset: str = proto.Field( + proto.STRING, + number=2, + ) + project_id: str = proto.Field( + proto.STRING, + number=3, + ) + query: str = proto.Field( + proto.STRING, + number=4, + ) + + +class SpannerIODetails(proto.Message): + r"""Metadata for a Spanner connector used by the job. + + Attributes: + project_id (str): + ProjectId accessed in the connection. + instance_id (str): + InstanceId accessed in the connection. + database_id (str): + DatabaseId accessed in the connection. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + database_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SdkVersion(proto.Message): + r"""The version of the SDK used to run the job. + + Attributes: + version (str): + The version of the SDK used to run the job. + version_display_name (str): + A readable string describing the version of + the SDK. + sdk_support_status (google.cloud.dataflow_v1beta3.types.SdkVersion.SdkSupportStatus): + The support status for this SDK version. + """ + class SdkSupportStatus(proto.Enum): + r"""The support status of the SDK used to run the job.""" + UNKNOWN = 0 + SUPPORTED = 1 + STALE = 2 + DEPRECATED = 3 + UNSUPPORTED = 4 + + version: str = proto.Field( + proto.STRING, + number=1, + ) + version_display_name: str = proto.Field( + proto.STRING, + number=2, + ) + sdk_support_status: SdkSupportStatus = proto.Field( + proto.ENUM, + number=3, + enum=SdkSupportStatus, + ) + + +class JobMetadata(proto.Message): + r"""Metadata available primarily for filtering jobs. Will be + included in the ListJob response and Job SUMMARY view. + + Attributes: + sdk_version (google.cloud.dataflow_v1beta3.types.SdkVersion): + The SDK version used to run the job. + spanner_details (MutableSequence[google.cloud.dataflow_v1beta3.types.SpannerIODetails]): + Identification of a Spanner source used in + the Dataflow job. + bigquery_details (MutableSequence[google.cloud.dataflow_v1beta3.types.BigQueryIODetails]): + Identification of a BigQuery source used in + the Dataflow job. + big_table_details (MutableSequence[google.cloud.dataflow_v1beta3.types.BigTableIODetails]): + Identification of a Cloud Bigtable source + used in the Dataflow job. + pubsub_details (MutableSequence[google.cloud.dataflow_v1beta3.types.PubSubIODetails]): + Identification of a Pub/Sub source used in + the Dataflow job. + file_details (MutableSequence[google.cloud.dataflow_v1beta3.types.FileIODetails]): + Identification of a File source used in the + Dataflow job. + datastore_details (MutableSequence[google.cloud.dataflow_v1beta3.types.DatastoreIODetails]): + Identification of a Datastore source used in + the Dataflow job. 
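+
+    For example (an illustrative sketch; the IDs are placeholders),
+    these IO details can be read from a job fetched with the SUMMARY
+    view or higher::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.JobsV1Beta3Client()
+        job = client.get_job(
+            request=dataflow_v1beta3.GetJobRequest(
+                project_id="my-project",
+                job_id="my-job-id",
+                location="us-central1",
+                view=dataflow_v1beta3.JobView.JOB_VIEW_SUMMARY,
+            )
+        )
+        for details in job.job_metadata.bigquery_details:
+            print(details.project_id, details.dataset, details.table)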
+ """ + + sdk_version: 'SdkVersion' = proto.Field( + proto.MESSAGE, + number=1, + message='SdkVersion', + ) + spanner_details: MutableSequence['SpannerIODetails'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='SpannerIODetails', + ) + bigquery_details: MutableSequence['BigQueryIODetails'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='BigQueryIODetails', + ) + big_table_details: MutableSequence['BigTableIODetails'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='BigTableIODetails', + ) + pubsub_details: MutableSequence['PubSubIODetails'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='PubSubIODetails', + ) + file_details: MutableSequence['FileIODetails'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='FileIODetails', + ) + datastore_details: MutableSequence['DatastoreIODetails'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='DatastoreIODetails', + ) + + +class ExecutionStageState(proto.Message): + r"""A message describing the state of a particular execution + stage. + + Attributes: + execution_stage_name (str): + The name of the execution stage. + execution_stage_state (google.cloud.dataflow_v1beta3.types.JobState): + Executions stage states allow the same set of + values as JobState. + current_state_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the stage transitioned to + this state. + """ + + execution_stage_name: str = proto.Field( + proto.STRING, + number=1, + ) + execution_stage_state: 'JobState' = proto.Field( + proto.ENUM, + number=2, + enum='JobState', + ) + current_state_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class PipelineDescription(proto.Message): + r"""A descriptive representation of submitted pipeline as well as + the executed form. This data is provided by the Dataflow + service for ease of visualizing the pipeline and interpreting + Dataflow provided metrics. + + Attributes: + original_pipeline_transform (MutableSequence[google.cloud.dataflow_v1beta3.types.TransformSummary]): + Description of each transform in the pipeline + and collections between them. + execution_pipeline_stage (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary]): + Description of each stage of execution of the + pipeline. + display_data (MutableSequence[google.cloud.dataflow_v1beta3.types.DisplayData]): + Pipeline level display data. + """ + + original_pipeline_transform: MutableSequence['TransformSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='TransformSummary', + ) + execution_pipeline_stage: MutableSequence['ExecutionStageSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ExecutionStageSummary', + ) + display_data: MutableSequence['DisplayData'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='DisplayData', + ) + + +class TransformSummary(proto.Message): + r"""Description of the type, names/ids, and input/outputs for a + transform. + + Attributes: + kind (google.cloud.dataflow_v1beta3.types.KindType): + Type of transform. + id (str): + SDK generated id of this transform instance. + name (str): + User provided name for this transform + instance. + display_data (MutableSequence[google.cloud.dataflow_v1beta3.types.DisplayData]): + Transform-specific display data. + output_collection_name (MutableSequence[str]): + User names for all collection outputs to + this transform. 
+ input_collection_name (MutableSequence[str]): + User names for all collection inputs to this + transform. + """ + + kind: 'KindType' = proto.Field( + proto.ENUM, + number=1, + enum='KindType', + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( + proto.STRING, + number=3, + ) + display_data: MutableSequence['DisplayData'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='DisplayData', + ) + output_collection_name: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + input_collection_name: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + + +class ExecutionStageSummary(proto.Message): + r"""Description of the composing transforms, names/ids, and + input/outputs of a stage of execution. Some composing + transforms and sources may have been generated by the Dataflow + service during execution planning. + + Attributes: + name (str): + Dataflow service generated name for this + stage. + id (str): + Dataflow service generated id for this stage. + kind (google.cloud.dataflow_v1beta3.types.KindType): + Type of transform this stage is executing. + input_source (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.StageSource]): + Input sources for this stage. + output_source (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.StageSource]): + Output sources for this stage. + prerequisite_stage (MutableSequence[str]): + Other stages that must complete before this + stage can run. + component_transform (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.ComponentTransform]): + Transforms that comprise this execution + stage. + component_source (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.ComponentSource]): + Collections produced and consumed by + component transforms of this stage. + """ + + class StageSource(proto.Message): + r"""Description of an input or output of an execution stage. + + Attributes: + user_name (str): + Human-readable name for this source; may be + user or system generated. + name (str): + Dataflow service generated name for this + source. + original_transform_or_collection (str): + User name for the original user transform or + collection with which this source is most + closely associated. + size_bytes (int): + Size of the source, if measurable. + """ + + user_name: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + original_transform_or_collection: str = proto.Field( + proto.STRING, + number=3, + ) + size_bytes: int = proto.Field( + proto.INT64, + number=4, + ) + + class ComponentTransform(proto.Message): + r"""Description of a transform executed as part of an execution + stage. + + Attributes: + user_name (str): + Human-readable name for this transform; may + be user or system generated. + name (str): + Dataflow service generated name for this + source. + original_transform (str): + User name for the original user transform + with which this transform is most closely + associated. + """ + + user_name: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + original_transform: str = proto.Field( + proto.STRING, + number=3, + ) + + class ComponentSource(proto.Message): + r"""Description of an interstitial value between transforms in an + execution stage. + + Attributes: + user_name (str): + Human-readable name for this transform; may + be user or system generated. 
+ name (str): + Dataflow service generated name for this + source. + original_transform_or_collection (str): + User name for the original user transform or + collection with which this source is most + closely associated. + """ + + user_name: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + original_transform_or_collection: str = proto.Field( + proto.STRING, + number=3, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + id: str = proto.Field( + proto.STRING, + number=2, + ) + kind: 'KindType' = proto.Field( + proto.ENUM, + number=3, + enum='KindType', + ) + input_source: MutableSequence[StageSource] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=StageSource, + ) + output_source: MutableSequence[StageSource] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=StageSource, + ) + prerequisite_stage: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + component_transform: MutableSequence[ComponentTransform] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=ComponentTransform, + ) + component_source: MutableSequence[ComponentSource] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=ComponentSource, + ) + + +class DisplayData(proto.Message): + r"""Data provided with a pipeline or transform to provide + descriptive info. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + The key identifying the display data. + This is intended to be used as a label for the + display data when viewed in a dax monitoring + system. + namespace (str): + The namespace for the key. This is usually a + class name or programming language namespace + (i.e. python module) which defines the display + data. This allows a dax monitoring system to + specially handle the data and perform custom + rendering. + str_value (str): + Contains value if the data is of string type. + + This field is a member of `oneof`_ ``Value``. + int64_value (int): + Contains value if the data is of int64 type. + + This field is a member of `oneof`_ ``Value``. + float_value (float): + Contains value if the data is of float type. + + This field is a member of `oneof`_ ``Value``. + java_class_value (str): + Contains value if the data is of java class + type. + + This field is a member of `oneof`_ ``Value``. + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + Contains value if the data is of timestamp + type. + + This field is a member of `oneof`_ ``Value``. + duration_value (google.protobuf.duration_pb2.Duration): + Contains value if the data is of duration + type. + + This field is a member of `oneof`_ ``Value``. + bool_value (bool): + Contains value if the data is of a boolean + type. + + This field is a member of `oneof`_ ``Value``. + short_str_value (str): + A possible additional shorter value to display. For example + a java_class_name_value of com.mypackage.MyDoFn will be + stored with MyDoFn as the short_str_value and + com.mypackage.MyDoFn as the java_class_name value. + short_str_value can be displayed and java_class_name_value + will be displayed as a tooltip. + url (str): + An optional full URL. + label (str): + An optional label to display in a dax UI for + the element. 
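+
+    Because the typed value fields above form the ``Value`` oneof, at
+    most one of them is set on a given instance. A hedged sketch of
+    dispatching on the set member (``display_data`` is assumed to be a
+    populated ``DisplayData``)::
+
+        from google.cloud import dataflow_v1beta3
+
+        field = dataflow_v1beta3.DisplayData.pb(display_data).WhichOneof("Value")
+        value = getattr(display_data, field) if field else None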
+ """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + namespace: str = proto.Field( + proto.STRING, + number=2, + ) + str_value: str = proto.Field( + proto.STRING, + number=4, + oneof='Value', + ) + int64_value: int = proto.Field( + proto.INT64, + number=5, + oneof='Value', + ) + float_value: float = proto.Field( + proto.FLOAT, + number=6, + oneof='Value', + ) + java_class_value: str = proto.Field( + proto.STRING, + number=7, + oneof='Value', + ) + timestamp_value: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + oneof='Value', + message=timestamp_pb2.Timestamp, + ) + duration_value: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=9, + oneof='Value', + message=duration_pb2.Duration, + ) + bool_value: bool = proto.Field( + proto.BOOL, + number=10, + oneof='Value', + ) + short_str_value: str = proto.Field( + proto.STRING, + number=11, + ) + url: str = proto.Field( + proto.STRING, + number=12, + ) + label: str = proto.Field( + proto.STRING, + number=13, + ) + + +class Step(proto.Message): + r"""Defines a particular step within a Cloud Dataflow job. + + A job consists of multiple steps, each of which performs some + specific operation as part of the overall job. Data is typically + passed from one step to another as part of the job. + + Here's an example of a sequence of steps which together implement a + Map-Reduce job: + + - Read a collection of data from some source, parsing the + collection's elements. + + - Validate the elements. + + - Apply a user-defined function to map each element to some value + and extract an element-specific key value. + + - Group elements with the same key into a single element with that + key, transforming a multiply-keyed collection into a + uniquely-keyed collection. + + - Write the elements out to some data sink. + + Note that the Cloud Dataflow service may be used to run many + different types of jobs, not just Map-Reduce. + + Attributes: + kind (str): + The kind of step in the Cloud Dataflow job. + name (str): + The name that identifies the step. This must + be unique for each step with respect to all + other steps in the Cloud Dataflow job. + properties (google.protobuf.struct_pb2.Struct): + Named properties associated with the step. Each kind of + predefined step has its own required set of properties. Must + be provided on Create. Only retrieved with JOB_VIEW_ALL. + """ + + kind: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + ) + properties: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class JobExecutionInfo(proto.Message): + r"""Additional information about how a Cloud Dataflow job will be + executed that isn't contained in the submitted job. + + Attributes: + stages (MutableMapping[str, google.cloud.dataflow_v1beta3.types.JobExecutionStageInfo]): + A mapping from each stage to the information + about that stage. + """ + + stages: MutableMapping[str, 'JobExecutionStageInfo'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message='JobExecutionStageInfo', + ) + + +class JobExecutionStageInfo(proto.Message): + r"""Contains information about how a particular + [google.dataflow.v1beta3.Step][google.dataflow.v1beta3.Step] will be + executed. + + Attributes: + step_name (MutableSequence[str]): + The steps associated with the execution + stage. Note that stages may have several steps, + and that a given step might be run by more than + one stage. 
+ """ + + step_name: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class CreateJobRequest(proto.Message): + r"""Request to create a Cloud Dataflow job. + + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + job (google.cloud.dataflow_v1beta3.types.Job): + The job to create. + view (google.cloud.dataflow_v1beta3.types.JobView): + The level of information requested in + response. + replace_job_id (str): + Deprecated. This field is now in the Job + message. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job: 'Job' = proto.Field( + proto.MESSAGE, + number=2, + message='Job', + ) + view: 'JobView' = proto.Field( + proto.ENUM, + number=3, + enum='JobView', + ) + replace_job_id: str = proto.Field( + proto.STRING, + number=4, + ) + location: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetJobRequest(proto.Message): + r"""Request to get the state of a Cloud Dataflow job. + + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + job_id (str): + The job ID. + view (google.cloud.dataflow_v1beta3.types.JobView): + The level of information requested in + response. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + view: 'JobView' = proto.Field( + proto.ENUM, + number=3, + enum='JobView', + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateJobRequest(proto.Message): + r"""Request to update a Cloud Dataflow job. + + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + job belongs to. + job_id (str): + The job ID. + job (google.cloud.dataflow_v1beta3.types.Job): + The updated job. + Only the job state is updatable; other fields + will be ignored. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + job: 'Job' = proto.Field( + proto.MESSAGE, + number=3, + message='Job', + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListJobsRequest(proto.Message): + r"""Request to list Cloud Dataflow jobs. + + Attributes: + filter (google.cloud.dataflow_v1beta3.types.ListJobsRequest.Filter): + The kind of filter to use. + project_id (str): + The project which owns the jobs. + view (google.cloud.dataflow_v1beta3.types.JobView): + Deprecated. ListJobs always returns summaries + now. Use GetJob for other JobViews. + page_size (int): + If there are many jobs, limit response to at most this many. + The actual number of jobs returned will be the lesser of + max_responses and an unspecified server-defined limit. + page_token (str): + Set this to the 'next_page_token' field of a previous + response to request additional results in a long list. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains this job. + """ + class Filter(proto.Enum): + r"""This field filters out and returns jobs in the specified job + state. 
The order of data returned is determined by the filter
+        used, and is subject to change.
+        """
+        UNKNOWN = 0
+        ALL = 1
+        TERMINATED = 2
+        ACTIVE = 3
+
+    filter: Filter = proto.Field(
+        proto.ENUM,
+        number=5,
+        enum=Filter,
+    )
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    view: 'JobView' = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum='JobView',
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    location: str = proto.Field(
+        proto.STRING,
+        number=17,
+    )
+
+
+class FailedLocation(proto.Message):
+    r"""Indicates which [regional endpoint]
+    (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+    failed to respond to a request for data.
+
+    Attributes:
+        name (str):
+            The name of the [regional endpoint]
+            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+            that failed to respond.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ListJobsResponse(proto.Message):
+    r"""Response to a request to list Cloud Dataflow jobs in a
+    project. This might be a partial response, depending on the page
+    size in the ListJobsRequest. However, if the project does not
+    have any jobs, an instance of ListJobsResponse is not returned
+    and the request's response body is empty {}.
+
+    Attributes:
+        jobs (MutableSequence[google.cloud.dataflow_v1beta3.types.Job]):
+            A subset of the requested job information.
+        next_page_token (str):
+            Set if there may be more results than fit in
+            this response.
+        failed_location (MutableSequence[google.cloud.dataflow_v1beta3.types.FailedLocation]):
+            Zero or more messages describing the [regional endpoints]
+            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+            that failed to respond.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    jobs: MutableSequence['Job'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='Job',
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    failed_location: MutableSequence['FailedLocation'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message='FailedLocation',
+    )
+
+
+class SnapshotJobRequest(proto.Message):
+    r"""Request to create a snapshot of a job.
+
+    Attributes:
+        project_id (str):
+            The project which owns the job to be
+            snapshotted.
+        job_id (str):
+            The job to be snapshotted.
+        ttl (google.protobuf.duration_pb2.Duration):
+            TTL for the snapshot.
+        location (str):
+            The location that contains this job.
+        snapshot_sources (bool):
+            If true, perform snapshots for sources which
+            support this.
+        description (str):
+            User specified description of the snapshot.
+            May be empty.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    job_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    ttl: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=duration_pb2.Duration,
+    )
+    location: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    snapshot_sources: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+
+
+class CheckActiveJobsRequest(proto.Message):
+    r"""Request to check whether active jobs exist for a project.
+
+    Attributes:
+        project_id (str):
+            The project which owns the jobs.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CheckActiveJobsResponse(proto.Message):
+    r"""Response for CheckActiveJobsRequest.
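+
+    A hedged sketch of issuing the corresponding request (the project
+    ID is a placeholder)::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.JobsV1Beta3Client()
+        response = client.check_active_jobs(
+            request=dataflow_v1beta3.CheckActiveJobsRequest(
+                project_id="my-project",
+            )
+        )
+        print(response.active_jobs_exist)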
+
+    Attributes:
+        active_jobs_exist (bool):
+            If True, active jobs exist for the project;
+            False otherwise.
+    """
+
+    active_jobs_exist: bool = proto.Field(
+        proto.BOOL,
+        number=1,
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py
new file mode 100644
index 0000000..ac4db6d
--- /dev/null
+++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py
@@ -0,0 +1,302 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import struct_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.dataflow.v1beta3',
+    manifest={
+        'JobMessageImportance',
+        'JobMessage',
+        'StructuredMessage',
+        'AutoscalingEvent',
+        'ListJobMessagesRequest',
+        'ListJobMessagesResponse',
+    },
+)
+
+
+class JobMessageImportance(proto.Enum):
+    r"""Indicates the importance of the message."""
+    JOB_MESSAGE_IMPORTANCE_UNKNOWN = 0
+    JOB_MESSAGE_DEBUG = 1
+    JOB_MESSAGE_DETAILED = 2
+    JOB_MESSAGE_BASIC = 5
+    JOB_MESSAGE_WARNING = 3
+    JOB_MESSAGE_ERROR = 4
+
+
+class JobMessage(proto.Message):
+    r"""A particular message pertaining to a Dataflow job.
+
+    Attributes:
+        id (str):
+            Deprecated.
+        time (google.protobuf.timestamp_pb2.Timestamp):
+            The timestamp of the message.
+        message_text (str):
+            The text of the message.
+        message_importance (google.cloud.dataflow_v1beta3.types.JobMessageImportance):
+            Importance level of the message.
+    """
+
+    id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+    message_text: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    message_importance: 'JobMessageImportance' = proto.Field(
+        proto.ENUM,
+        number=4,
+        enum='JobMessageImportance',
+    )
+
+
+class StructuredMessage(proto.Message):
+    r"""A rich message format, including a human readable string, a
+    key for identifying the message, and structured data associated
+    with the message for programmatic consumption.
+
+    Attributes:
+        message_text (str):
+            Human-readable version of message.
+        message_key (str):
+            Identifier for this message type. Used by
+            external systems to internationalize or
+            personalize message.
+        parameters (MutableSequence[google.cloud.dataflow_v1beta3.types.StructuredMessage.Parameter]):
+            The structured data associated with this
+            message.
+    """
+
+    class Parameter(proto.Message):
+        r"""Structured data associated with this message.
+
+        Attributes:
+            key (str):
+                Key or name for this parameter.
+            value (google.protobuf.struct_pb2.Value):
+                Value for this parameter.
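+
+        Since ``value`` is a ``google.protobuf.Value``, a sketch of
+        reading a string payload (assuming ``param`` is a ``Parameter``
+        carrying one) is simply::
+
+            text = param.value.string_value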
+ """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + value: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Value, + ) + + message_text: str = proto.Field( + proto.STRING, + number=1, + ) + message_key: str = proto.Field( + proto.STRING, + number=2, + ) + parameters: MutableSequence[Parameter] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Parameter, + ) + + +class AutoscalingEvent(proto.Message): + r"""A structured message reporting an autoscaling decision made + by the Dataflow service. + + Attributes: + current_num_workers (int): + The current number of workers the job has. + target_num_workers (int): + The target number of workers the worker pool + wants to resize to use. + event_type (google.cloud.dataflow_v1beta3.types.AutoscalingEvent.AutoscalingEventType): + The type of autoscaling event to report. + description (google.cloud.dataflow_v1beta3.types.StructuredMessage): + A message describing why the system decided + to adjust the current number of workers, why it + failed, or why the system decided to not make + any changes to the number of workers. + time (google.protobuf.timestamp_pb2.Timestamp): + The time this event was emitted to indicate a new target or + current num_workers value. + worker_pool (str): + A short and friendly name for the worker pool + this event refers to. + """ + class AutoscalingEventType(proto.Enum): + r"""Indicates the type of autoscaling event.""" + TYPE_UNKNOWN = 0 + TARGET_NUM_WORKERS_CHANGED = 1 + CURRENT_NUM_WORKERS_CHANGED = 2 + ACTUATION_FAILURE = 3 + NO_CHANGE = 4 + + current_num_workers: int = proto.Field( + proto.INT64, + number=1, + ) + target_num_workers: int = proto.Field( + proto.INT64, + number=2, + ) + event_type: AutoscalingEventType = proto.Field( + proto.ENUM, + number=3, + enum=AutoscalingEventType, + ) + description: 'StructuredMessage' = proto.Field( + proto.MESSAGE, + number=4, + message='StructuredMessage', + ) + time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + worker_pool: str = proto.Field( + proto.STRING, + number=7, + ) + + +class ListJobMessagesRequest(proto.Message): + r"""Request to list job messages. Up to max_results messages will be + returned in the time range specified starting with the oldest + messages first. If no time range is specified the results with start + with the oldest message. + + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get messages about. + minimum_importance (google.cloud.dataflow_v1beta3.types.JobMessageImportance): + Filter to only get messages with importance + >= level + page_size (int): + If specified, determines the maximum number + of messages to return. If unspecified, the + service may choose an appropriate default, or + may return an arbitrarily large number of + results. + page_token (str): + If supplied, this should be the value of next_page_token + returned by an earlier call. This will cause the next page + of results to be returned. + start_time (google.protobuf.timestamp_pb2.Timestamp): + If specified, return only messages with timestamps >= + start_time. The default is the job creation time (i.e. + beginning of messages). + end_time (google.protobuf.timestamp_pb2.Timestamp): + Return only messages with timestamps < end_time. The default + is now (i.e. return up to the latest messages available). 
+ location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + minimum_importance: 'JobMessageImportance' = proto.Field( + proto.ENUM, + number=3, + enum='JobMessageImportance', + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + location: str = proto.Field( + proto.STRING, + number=8, + ) + + +class ListJobMessagesResponse(proto.Message): + r"""Response to a request to list job messages. + + Attributes: + job_messages (MutableSequence[google.cloud.dataflow_v1beta3.types.JobMessage]): + Messages in ascending timestamp order. + next_page_token (str): + The token to obtain the next page of results + if there are more. + autoscaling_events (MutableSequence[google.cloud.dataflow_v1beta3.types.AutoscalingEvent]): + Autoscaling events in ascending timestamp + order. + """ + + @property + def raw_page(self): + return self + + job_messages: MutableSequence['JobMessage'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='JobMessage', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + autoscaling_events: MutableSequence['AutoscalingEvent'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='AutoscalingEvent', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py new file mode 100644 index 0000000..b2aaa9b --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py @@ -0,0 +1,619 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import struct_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.dataflow.v1beta3',
+    manifest={
+        'ExecutionState',
+        'MetricStructuredName',
+        'MetricUpdate',
+        'GetJobMetricsRequest',
+        'JobMetrics',
+        'GetJobExecutionDetailsRequest',
+        'ProgressTimeseries',
+        'StageSummary',
+        'JobExecutionDetails',
+        'GetStageExecutionDetailsRequest',
+        'WorkItemDetails',
+        'WorkerDetails',
+        'StageExecutionDetails',
+    },
+)
+
+
+class ExecutionState(proto.Enum):
+    r"""The state of some component of job execution."""
+    EXECUTION_STATE_UNKNOWN = 0
+    EXECUTION_STATE_NOT_STARTED = 1
+    EXECUTION_STATE_RUNNING = 2
+    EXECUTION_STATE_SUCCEEDED = 3
+    EXECUTION_STATE_FAILED = 4
+    EXECUTION_STATE_CANCELLED = 5
+
+
+class MetricStructuredName(proto.Message):
+    r"""Identifies a metric, by describing the source which generated
+    the metric.
+
+    Attributes:
+        origin (str):
+            Origin (namespace) of metric name. May be
+            blank for user-defined metrics; will be
+            "dataflow" for metrics defined by the Dataflow
+            service or SDK.
+        name (str):
+            Worker-defined metric name.
+        context (MutableMapping[str, str]):
+            Zero or more labeled fields which identify the part of the
+            job this metric is associated with, such as the name of a
+            step or collection.
+
+            For example, built-in counters associated with steps will
+            have context['step'] = <step-name>. Counters associated with
+            PCollections in the SDK will have context['pcollection'] =
+            <pcollection-name>.
+    """
+
+    origin: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    context: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+
+
+class MetricUpdate(proto.Message):
+    r"""Describes the state of a metric.
+
+    Attributes:
+        name (google.cloud.dataflow_v1beta3.types.MetricStructuredName):
+            Name of the metric.
+        kind (str):
+            Metric aggregation kind. The possible metric
+            aggregation kinds are "Sum", "Max", "Min",
+            "Mean", "Set", "And", "Or", and "Distribution".
+            The specified aggregation kind is
+            case-insensitive.
+            If omitted, this is not an aggregated value but
+            instead a single metric sample value.
+        cumulative (bool):
+            True if this metric is reported as the total
+            cumulative aggregate value accumulated since the
+            worker started working on this WorkItem. By
+            default this is false, indicating that this
+            metric is reported as a delta that is not
+            associated with any WorkItem.
+        scalar (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for
+            aggregation kinds "Sum", "Max", "Min", "And",
+            and "Or". The possible value types are Long,
+            Double, and Boolean.
+        mean_sum (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for the "Mean" aggregation
+            kind. This holds the sum of the aggregated values and is
+            used in combination with mean_count below to obtain the
+            actual mean aggregate value. The only possible value types
+            are Long and Double.
+        mean_count (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for the "Mean" aggregation
+            kind. This holds the count of the aggregated values and is
+            used in combination with mean_sum above to obtain the actual
+            mean aggregate value. The only possible value type is Long.
+        set (google.protobuf.struct_pb2.Value):
+            Worker-computed aggregate value for the "Set"
+            aggregation kind.
The only possible value type + is a list of Values whose type can be Long, + Double, or String, according to the metric's + type. All Values in the list must be of the + same type. + distribution (google.protobuf.struct_pb2.Value): + A struct value describing properties of a + distribution of numeric values. + gauge (google.protobuf.struct_pb2.Value): + A struct value describing properties of a + Gauge. Metrics of gauge type show the value of a + metric across time, and is aggregated based on + the newest value. + internal (google.protobuf.struct_pb2.Value): + Worker-computed aggregate value for internal + use by the Dataflow service. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp associated with the metric value. + Optional when workers are reporting work + progress; it will be filled in responses from + the metrics API. + """ + + name: 'MetricStructuredName' = proto.Field( + proto.MESSAGE, + number=1, + message='MetricStructuredName', + ) + kind: str = proto.Field( + proto.STRING, + number=2, + ) + cumulative: bool = proto.Field( + proto.BOOL, + number=3, + ) + scalar: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Value, + ) + mean_sum: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Value, + ) + mean_count: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Value, + ) + set: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=7, + message=struct_pb2.Value, + ) + distribution: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=11, + message=struct_pb2.Value, + ) + gauge: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=12, + message=struct_pb2.Value, + ) + internal: struct_pb2.Value = proto.Field( + proto.MESSAGE, + number=8, + message=struct_pb2.Value, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + + +class GetJobMetricsRequest(proto.Message): + r"""Request to get job metrics. + + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get metrics for. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Return only metric data that has changed + since this time. Default is to return all + information about all metrics for the job. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + + +class JobMetrics(proto.Message): + r"""JobMetrics contains a collection of metrics describing the + detailed progress of a Dataflow job. Metrics correspond to + user-defined and system-defined metrics in the job. + + This resource captures only the most recent values of each + metric; time-series data can be queried for them (under the same + metric names) from Cloud Monitoring. + + Attributes: + metric_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp as of which metric values are + current. + metrics (MutableSequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): + All metrics for this job. 
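+
+    A hedged sketch of fetching these metrics (the IDs and location are
+    placeholders)::
+
+        from google.cloud import dataflow_v1beta3
+
+        client = dataflow_v1beta3.MetricsV1Beta3Client()
+        job_metrics = client.get_job_metrics(
+            request=dataflow_v1beta3.GetJobMetricsRequest(
+                project_id="my-project",
+                job_id="my-job-id",
+                location="us-central1",
+            )
+        )
+        for metric in job_metrics.metrics:
+            print(metric.name.name, metric.kind)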
+ """ + + metric_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + metrics: MutableSequence['MetricUpdate'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MetricUpdate', + ) + + +class GetJobExecutionDetailsRequest(proto.Message): + r"""Request to get job execution details. + + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get execution details for. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + page_size (int): + If specified, determines the maximum number + of stages to return. If unspecified, the + service may choose an appropriate default, or + may return an arbitrarily large number of + results. + page_token (str): + If supplied, this should be the value of next_page_token + returned by an earlier call. This will cause the next page + of results to be returned. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ProgressTimeseries(proto.Message): + r"""Information about the progress of some component of job + execution. + + Attributes: + current_progress (float): + The current progress of the component, in the range [0,1]. + data_points (MutableSequence[google.cloud.dataflow_v1beta3.types.ProgressTimeseries.Point]): + History of progress for the component. + Points are sorted by time. + """ + + class Point(proto.Message): + r"""A point in the timeseries. + + Attributes: + time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp of the point. + value (float): + The value of the point. + """ + + time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + value: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + current_progress: float = proto.Field( + proto.DOUBLE, + number=1, + ) + data_points: MutableSequence[Point] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Point, + ) + + +class StageSummary(proto.Message): + r"""Information about a particular execution stage of a job. + + Attributes: + stage_id (str): + ID of this stage + state (google.cloud.dataflow_v1beta3.types.ExecutionState): + State of this stage. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of this stage. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of this stage. + If the work item is completed, this is the + actual end time of the stage. Otherwise, it is + the predicted end time. + progress (google.cloud.dataflow_v1beta3.types.ProgressTimeseries): + Progress for this stage. + Only applicable to Batch jobs. + metrics (MutableSequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): + Metrics for this stage. 
+ """ + + stage_id: str = proto.Field( + proto.STRING, + number=1, + ) + state: 'ExecutionState' = proto.Field( + proto.ENUM, + number=2, + enum='ExecutionState', + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + progress: 'ProgressTimeseries' = proto.Field( + proto.MESSAGE, + number=5, + message='ProgressTimeseries', + ) + metrics: MutableSequence['MetricUpdate'] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='MetricUpdate', + ) + + +class JobExecutionDetails(proto.Message): + r"""Information about the execution of a job. + + Attributes: + stages (MutableSequence[google.cloud.dataflow_v1beta3.types.StageSummary]): + The stages of the job execution. + next_page_token (str): + If present, this response does not contain all requested + tasks. To obtain the next page of results, repeat the + request with page_token set to this value. + """ + + @property + def raw_page(self): + return self + + stages: MutableSequence['StageSummary'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StageSummary', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetStageExecutionDetailsRequest(proto.Message): + r"""Request to get information about a particular execution stage + of a job. Currently only tracked for Batch jobs. + + Attributes: + project_id (str): + A project id. + job_id (str): + The job to get execution details for. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + that contains the job specified by job_id. + stage_id (str): + The stage for which to fetch information. + page_size (int): + If specified, determines the maximum number + of work items to return. If unspecified, the + service may choose an appropriate default, or + may return an arbitrarily large number of + results. + page_token (str): + If supplied, this should be the value of next_page_token + returned by an earlier call. This will cause the next page + of results to be returned. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Lower time bound of work items to include, by + start time. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Upper time bound of work items to include, by + start time. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + stage_id: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=5, + ) + page_token: str = proto.Field( + proto.STRING, + number=6, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + message=timestamp_pb2.Timestamp, + ) + + +class WorkItemDetails(proto.Message): + r"""Information about an individual work item execution. + + Attributes: + task_id (str): + Name of this work item. + attempt_id (str): + Attempt ID of this work item + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of this work item attempt. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of this work item attempt. + If the work item is completed, this is the + actual end time of the work item. 
Otherwise, it + is the predicted end time. + state (google.cloud.dataflow_v1beta3.types.ExecutionState): + State of this work item. + progress (google.cloud.dataflow_v1beta3.types.ProgressTimeseries): + Progress of this work item. + metrics (MutableSequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): + Metrics for this work item. + """ + + task_id: str = proto.Field( + proto.STRING, + number=1, + ) + attempt_id: str = proto.Field( + proto.STRING, + number=2, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + state: 'ExecutionState' = proto.Field( + proto.ENUM, + number=5, + enum='ExecutionState', + ) + progress: 'ProgressTimeseries' = proto.Field( + proto.MESSAGE, + number=6, + message='ProgressTimeseries', + ) + metrics: MutableSequence['MetricUpdate'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='MetricUpdate', + ) + + +class WorkerDetails(proto.Message): + r"""Information about a worker + + Attributes: + worker_name (str): + Name of this worker + work_items (MutableSequence[google.cloud.dataflow_v1beta3.types.WorkItemDetails]): + Work items processed by this worker, sorted + by time. + """ + + worker_name: str = proto.Field( + proto.STRING, + number=1, + ) + work_items: MutableSequence['WorkItemDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='WorkItemDetails', + ) + + +class StageExecutionDetails(proto.Message): + r"""Information about the workers and work items within a stage. + + Attributes: + workers (MutableSequence[google.cloud.dataflow_v1beta3.types.WorkerDetails]): + Workers that have done work on the stage. + next_page_token (str): + If present, this response does not contain all requested + tasks. To obtain the next page of results, repeat the + request with page_token set to this value. + """ + + @property + def raw_page(self): + return self + + workers: MutableSequence['WorkerDetails'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='WorkerDetails', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py new file mode 100644 index 0000000..96010b1 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py @@ -0,0 +1,253 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import MutableMapping, MutableSequence
+
+import proto  # type: ignore
+
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+
+
+__protobuf__ = proto.module(
+    package='google.dataflow.v1beta3',
+    manifest={
+        'SnapshotState',
+        'PubsubSnapshotMetadata',
+        'Snapshot',
+        'GetSnapshotRequest',
+        'DeleteSnapshotRequest',
+        'DeleteSnapshotResponse',
+        'ListSnapshotsRequest',
+        'ListSnapshotsResponse',
+    },
+)
+
+
+class SnapshotState(proto.Enum):
+    r"""Snapshot state."""
+    UNKNOWN_SNAPSHOT_STATE = 0
+    PENDING = 1
+    RUNNING = 2
+    READY = 3
+    FAILED = 4
+    DELETED = 5
+
+
+class PubsubSnapshotMetadata(proto.Message):
+    r"""Represents a Pubsub snapshot.
+
+    Attributes:
+        topic_name (str):
+            The name of the Pubsub topic.
+        snapshot_name (str):
+            The name of the Pubsub snapshot.
+        expire_time (google.protobuf.timestamp_pb2.Timestamp):
+            The expire time of the Pubsub snapshot.
+    """
+
+    topic_name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    snapshot_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    expire_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class Snapshot(proto.Message):
+    r"""Represents a snapshot of a job.
+
+    Attributes:
+        id (str):
+            The unique ID of this snapshot.
+        project_id (str):
+            The project this snapshot belongs to.
+        source_job_id (str):
+            The job this snapshot was created from.
+        creation_time (google.protobuf.timestamp_pb2.Timestamp):
+            The time this snapshot was created.
+        ttl (google.protobuf.duration_pb2.Duration):
+            The time after which this snapshot will be
+            automatically deleted.
+        state (google.cloud.dataflow_v1beta3.types.SnapshotState):
+            State of the snapshot.
+        pubsub_metadata (MutableSequence[google.cloud.dataflow_v1beta3.types.PubsubSnapshotMetadata]):
+            Pub/Sub snapshot metadata.
+        description (str):
+            User-specified description of the snapshot.
+            May be empty.
+        disk_size_bytes (int):
+            The disk byte size of the snapshot. Only
+            available for snapshots in READY state.
+        region (str):
+            Cloud region where this snapshot lives,
+            e.g., "us-central1".
+    """
+
+    id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    source_job_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    creation_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+    ttl: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=duration_pb2.Duration,
+    )
+    state: 'SnapshotState' = proto.Field(
+        proto.ENUM,
+        number=6,
+        enum='SnapshotState',
+    )
+    pubsub_metadata: MutableSequence['PubsubSnapshotMetadata'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=7,
+        message='PubsubSnapshotMetadata',
+    )
+    description: str = proto.Field(
+        proto.STRING,
+        number=8,
+    )
+    disk_size_bytes: int = proto.Field(
+        proto.INT64,
+        number=9,
+    )
+    region: str = proto.Field(
+        proto.STRING,
+        number=10,
+    )
+
+
+class GetSnapshotRequest(proto.Message):
+    r"""Request to get information about a snapshot.
+
+    Attributes:
+        project_id (str):
+            The ID of the Cloud Platform project that the
+            snapshot belongs to.
+        snapshot_id (str):
+            The ID of the snapshot.
+        location (str):
+            The location that contains this snapshot.
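+
+    Example (an illustrative sketch only, not part of the generated
+    surface; the project, snapshot, and location values are hypothetical)::
+
+        from google.cloud import dataflow_v1beta3
+
+        # Create a client and fetch a single snapshot.
+        client = dataflow_v1beta3.SnapshotsV1Beta3Client()
+        request = dataflow_v1beta3.GetSnapshotRequest(
+            project_id="my-project",
+            snapshot_id="my-snapshot-id",
+            location="us-central1",
+        )
+        snapshot = client.get_snapshot(request=request)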
+ """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_id: str = proto.Field( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteSnapshotRequest(proto.Message): + r"""Request to delete a snapshot. + + Attributes: + project_id (str): + The ID of the Cloud Platform project that the + snapshot belongs to. + snapshot_id (str): + The ID of the snapshot. + location (str): + The location that contains this snapshot. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot_id: str = proto.Field( + proto.STRING, + number=2, + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteSnapshotResponse(proto.Message): + r"""Response from deleting a snapshot. + """ + + +class ListSnapshotsRequest(proto.Message): + r"""Request to list snapshots. + + Attributes: + project_id (str): + The project ID to list snapshots for. + job_id (str): + If specified, list snapshots created from + this job. + location (str): + The location to list snapshots in. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_id: str = proto.Field( + proto.STRING, + number=3, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSnapshotsResponse(proto.Message): + r"""List of snapshots. + + Attributes: + snapshots (MutableSequence[google.cloud.dataflow_v1beta3.types.Snapshot]): + Returned snapshots. + """ + + snapshots: MutableSequence['Snapshot'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Snapshot', + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py new file mode 100644 index 0000000..4656222 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py @@ -0,0 +1,501 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.dataflow.v1beta3', + manifest={ + 'TopologyConfig', + 'PubsubLocation', + 'StreamingStageLocation', + 'StreamingSideInputLocation', + 'CustomSourceLocation', + 'StreamLocation', + 'StateFamilyConfig', + 'ComputationTopology', + 'KeyRangeLocation', + 'MountedDataDisk', + 'DataDiskAssignment', + 'KeyRangeDataDiskAssignment', + 'StreamingComputationRanges', + 'StreamingApplianceSnapshotConfig', + }, +) + + +class TopologyConfig(proto.Message): + r"""Global topology of the streaming Dataflow job, including all + computations and their sharded locations. + + Attributes: + computations (MutableSequence[google.cloud.dataflow_v1beta3.types.ComputationTopology]): + The computations associated with a streaming + Dataflow job. 
+        data_disk_assignments (MutableSequence[google.cloud.dataflow_v1beta3.types.DataDiskAssignment]):
+            The disks assigned to a streaming Dataflow
+            job.
+        user_stage_to_computation_name_map (MutableMapping[str, str]):
+            Maps user stage names to stable computation
+            names.
+        forwarding_key_bits (int):
+            The size (in bits) of keys that will be
+            assigned to source messages.
+        persistent_state_version (int):
+            Version number for persistent state.
+    """
+
+    computations: MutableSequence['ComputationTopology'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='ComputationTopology',
+    )
+    data_disk_assignments: MutableSequence['DataDiskAssignment'] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message='DataDiskAssignment',
+    )
+    user_stage_to_computation_name_map: MutableMapping[str, str] = proto.MapField(
+        proto.STRING,
+        proto.STRING,
+        number=3,
+    )
+    forwarding_key_bits: int = proto.Field(
+        proto.INT32,
+        number=4,
+    )
+    persistent_state_version: int = proto.Field(
+        proto.INT32,
+        number=5,
+    )
+
+
+class PubsubLocation(proto.Message):
+    r"""Identifies a pubsub location to use for transferring data
+    into or out of a streaming Dataflow job.
+
+    Attributes:
+        topic (str):
+            A pubsub topic, in the form of
+            "pubsub.googleapis.com/topics/<project-id>/<topic-name>".
+        subscription (str):
+            A pubsub subscription, in the form of
+            "pubsub.googleapis.com/subscriptions/<project-id>/<subscription-name>".
+        timestamp_label (str):
+            If set, contains a pubsub label from which to
+            extract record timestamps. If left empty, record
+            timestamps will be generated upon arrival.
+        id_label (str):
+            If set, contains a pubsub label from which to
+            extract record ids. If left empty, record
+            deduplication will be strictly best effort.
+        drop_late_data (bool):
+            Indicates whether the pipeline allows
+            late-arriving data.
+        tracking_subscription (str):
+            If set, specifies the pubsub subscription
+            that will be used for tracking custom time
+            timestamps for watermark estimation.
+        with_attributes (bool):
+            If true, then the client has requested to get
+            pubsub attributes.
+    """
+
+    topic: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    subscription: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    timestamp_label: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    id_label: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    drop_late_data: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+    tracking_subscription: str = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    with_attributes: bool = proto.Field(
+        proto.BOOL,
+        number=7,
+    )
+
+
+class StreamingStageLocation(proto.Message):
+    r"""Identifies the location of a streaming computation stage, for
+    stage-to-stage communication.
+
+    Attributes:
+        stream_id (str):
+            Identifies the particular stream within the
+            streaming Dataflow job.
+    """
+
+    stream_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class StreamingSideInputLocation(proto.Message):
+    r"""Identifies the location of a streaming side input.
+
+    Attributes:
+        tag (str):
+            Identifies the particular side input within
+            the streaming Dataflow job.
+        state_family (str):
+            Identifies the state family where this side
+            input is stored.
+    """
+
+    tag: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    state_family: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class CustomSourceLocation(proto.Message):
+    r"""Identifies the location of a custom source.
+
+    Attributes:
+        stateful (bool):
+            Whether this source is stateful.
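+
+    Example (illustrative only; this message is normally produced by the
+    Dataflow service rather than hand-built)::
+
+        loc = dataflow_v1beta3.CustomSourceLocation(stateful=True)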
+ """ + + stateful: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +class StreamLocation(proto.Message): + r"""Describes a stream of data, either as input to be processed + or as output of a streaming Dataflow job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + streaming_stage_location (google.cloud.dataflow_v1beta3.types.StreamingStageLocation): + The stream is part of another computation + within the current streaming Dataflow job. + + This field is a member of `oneof`_ ``location``. + pubsub_location (google.cloud.dataflow_v1beta3.types.PubsubLocation): + The stream is a pubsub stream. + + This field is a member of `oneof`_ ``location``. + side_input_location (google.cloud.dataflow_v1beta3.types.StreamingSideInputLocation): + The stream is a streaming side input. + + This field is a member of `oneof`_ ``location``. + custom_source_location (google.cloud.dataflow_v1beta3.types.CustomSourceLocation): + The stream is a custom source. + + This field is a member of `oneof`_ ``location``. + """ + + streaming_stage_location: 'StreamingStageLocation' = proto.Field( + proto.MESSAGE, + number=1, + oneof='location', + message='StreamingStageLocation', + ) + pubsub_location: 'PubsubLocation' = proto.Field( + proto.MESSAGE, + number=2, + oneof='location', + message='PubsubLocation', + ) + side_input_location: 'StreamingSideInputLocation' = proto.Field( + proto.MESSAGE, + number=3, + oneof='location', + message='StreamingSideInputLocation', + ) + custom_source_location: 'CustomSourceLocation' = proto.Field( + proto.MESSAGE, + number=4, + oneof='location', + message='CustomSourceLocation', + ) + + +class StateFamilyConfig(proto.Message): + r"""State family configuration. + + Attributes: + state_family (str): + The state family value. + is_read (bool): + If true, this family corresponds to a read + operation. + """ + + state_family: str = proto.Field( + proto.STRING, + number=1, + ) + is_read: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ComputationTopology(proto.Message): + r"""All configuration data for a particular Computation. + + Attributes: + system_stage_name (str): + The system stage name. + computation_id (str): + The ID of the computation. + key_ranges (MutableSequence[google.cloud.dataflow_v1beta3.types.KeyRangeLocation]): + The key ranges processed by the computation. + inputs (MutableSequence[google.cloud.dataflow_v1beta3.types.StreamLocation]): + The inputs to the computation. + outputs (MutableSequence[google.cloud.dataflow_v1beta3.types.StreamLocation]): + The outputs from the computation. + state_families (MutableSequence[google.cloud.dataflow_v1beta3.types.StateFamilyConfig]): + The state family values. 
+ """ + + system_stage_name: str = proto.Field( + proto.STRING, + number=1, + ) + computation_id: str = proto.Field( + proto.STRING, + number=5, + ) + key_ranges: MutableSequence['KeyRangeLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='KeyRangeLocation', + ) + inputs: MutableSequence['StreamLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StreamLocation', + ) + outputs: MutableSequence['StreamLocation'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='StreamLocation', + ) + state_families: MutableSequence['StateFamilyConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='StateFamilyConfig', + ) + + +class KeyRangeLocation(proto.Message): + r"""Location information for a specific key-range of a sharded + computation. Currently we only support UTF-8 character splits to + simplify encoding into JSON. + + Attributes: + start (str): + The start (inclusive) of the key range. + end (str): + The end (exclusive) of the key range. + delivery_endpoint (str): + The physical location of this range + assignment to be used for streaming computation + cross-worker message delivery. + data_disk (str): + The name of the data disk where data for this + range is stored. This name is local to the + Google Cloud Platform project and uniquely + identifies the disk within that project, for + example + "myproject-1014-104817-4c2-harness-0-disk-1". + deprecated_persistent_directory (str): + DEPRECATED. The location of the persistent + state for this range, as a persistent directory + in the worker local filesystem. + """ + + start: str = proto.Field( + proto.STRING, + number=1, + ) + end: str = proto.Field( + proto.STRING, + number=2, + ) + delivery_endpoint: str = proto.Field( + proto.STRING, + number=3, + ) + data_disk: str = proto.Field( + proto.STRING, + number=5, + ) + deprecated_persistent_directory: str = proto.Field( + proto.STRING, + number=4, + ) + + +class MountedDataDisk(proto.Message): + r"""Describes mounted data disk. + + Attributes: + data_disk (str): + The name of the data disk. + This name is local to the Google Cloud Platform + project and uniquely identifies the disk within + that project, for example + "myproject-1014-104817-4c2-harness-0-disk-1". + """ + + data_disk: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DataDiskAssignment(proto.Message): + r"""Data disk assignment for a given VM instance. + + Attributes: + vm_instance (str): + VM instance name the data disks mounted to, + for example + "myproject-1014-104817-4c2-harness-0". + data_disks (MutableSequence[str]): + Mounted data disks. The order is important a + data disk's 0-based index in this list defines + which persistent directory the disk is mounted + to, for example the list of { + "myproject-1014-104817-4c2-harness-0-disk-0" }, + { "myproject-1014-104817-4c2-harness-0-disk-1" + }. + """ + + vm_instance: str = proto.Field( + proto.STRING, + number=1, + ) + data_disks: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class KeyRangeDataDiskAssignment(proto.Message): + r"""Data disk assignment information for a specific key-range of + a sharded computation. + Currently we only support UTF-8 character splits to simplify + encoding into JSON. + + Attributes: + start (str): + The start (inclusive) of the key range. + end (str): + The end (exclusive) of the key range. + data_disk (str): + The name of the data disk where data for this + range is stored. 
This name is local to the + Google Cloud Platform project and uniquely + identifies the disk within that project, for + example + "myproject-1014-104817-4c2-harness-0-disk-1". + """ + + start: str = proto.Field( + proto.STRING, + number=1, + ) + end: str = proto.Field( + proto.STRING, + number=2, + ) + data_disk: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StreamingComputationRanges(proto.Message): + r"""Describes full or partial data disk assignment information of + the computation ranges. + + Attributes: + computation_id (str): + The ID of the computation. + range_assignments (MutableSequence[google.cloud.dataflow_v1beta3.types.KeyRangeDataDiskAssignment]): + Data disk assignments for ranges from this + computation. + """ + + computation_id: str = proto.Field( + proto.STRING, + number=1, + ) + range_assignments: MutableSequence['KeyRangeDataDiskAssignment'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='KeyRangeDataDiskAssignment', + ) + + +class StreamingApplianceSnapshotConfig(proto.Message): + r"""Streaming appliance snapshot configuration. + + Attributes: + snapshot_id (str): + If set, indicates the snapshot id for the + snapshot being performed. + import_state_endpoint (str): + Indicates which endpoint is used to import + appliance state. + """ + + snapshot_id: str = proto.Field( + proto.STRING, + number=1, + ) + import_state_endpoint: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py new file mode 100644 index 0000000..0ae7c25 --- /dev/null +++ b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py @@ -0,0 +1,1063 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.dataflow_v1beta3.types import environment as gd_environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.dataflow.v1beta3', + manifest={ + 'ParameterType', + 'LaunchFlexTemplateResponse', + 'ContainerSpec', + 'LaunchFlexTemplateParameter', + 'FlexTemplateRuntimeEnvironment', + 'LaunchFlexTemplateRequest', + 'RuntimeEnvironment', + 'ParameterMetadata', + 'TemplateMetadata', + 'SDKInfo', + 'RuntimeMetadata', + 'CreateJobFromTemplateRequest', + 'GetTemplateRequest', + 'GetTemplateResponse', + 'LaunchTemplateParameters', + 'LaunchTemplateRequest', + 'LaunchTemplateResponse', + 'InvalidTemplateParameters', + 'DynamicTemplateLaunchParams', + }, +) + + +class ParameterType(proto.Enum): + r"""ParameterType specifies what kind of input we need for this + parameter. 
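+    For example, a template parameter that names a Cloud Storage file the
+    pipeline reads would typically be declared as ``GCS_READ_FILE`` so that
+    UIs can present an appropriate input picker.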
+ """ + DEFAULT = 0 + TEXT = 1 + GCS_READ_BUCKET = 2 + GCS_WRITE_BUCKET = 3 + GCS_READ_FILE = 4 + GCS_WRITE_FILE = 5 + GCS_READ_FOLDER = 6 + GCS_WRITE_FOLDER = 7 + PUBSUB_TOPIC = 8 + PUBSUB_SUBSCRIPTION = 9 + + +class LaunchFlexTemplateResponse(proto.Message): + r"""Response to the request to launch a job from Flex Template. + + Attributes: + job (google.cloud.dataflow_v1beta3.types.Job): + The job that was launched, if the request was + not a dry run and the job was successfully + launched. + """ + + job: jobs.Job = proto.Field( + proto.MESSAGE, + number=1, + message=jobs.Job, + ) + + +class ContainerSpec(proto.Message): + r"""Container Spec. + + Attributes: + image (str): + Name of the docker container image. E.g., + gcr.io/project/some-image + metadata (google.cloud.dataflow_v1beta3.types.TemplateMetadata): + Metadata describing a template including + description and validation rules. + sdk_info (google.cloud.dataflow_v1beta3.types.SDKInfo): + Required. SDK info of the Flex Template. + default_environment (google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment): + Default runtime environment for the job. + """ + + image: str = proto.Field( + proto.STRING, + number=1, + ) + metadata: 'TemplateMetadata' = proto.Field( + proto.MESSAGE, + number=2, + message='TemplateMetadata', + ) + sdk_info: 'SDKInfo' = proto.Field( + proto.MESSAGE, + number=3, + message='SDKInfo', + ) + default_environment: 'FlexTemplateRuntimeEnvironment' = proto.Field( + proto.MESSAGE, + number=4, + message='FlexTemplateRuntimeEnvironment', + ) + + +class LaunchFlexTemplateParameter(proto.Message): + r"""Launch FlexTemplate Parameter. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + job_name (str): + Required. The job name to use for the created + job. For update job request, job name should be + same as the existing running job. + container_spec (google.cloud.dataflow_v1beta3.types.ContainerSpec): + Spec about the container image to launch. + + This field is a member of `oneof`_ ``template``. + container_spec_gcs_path (str): + Cloud Storage path to a file with json + serialized ContainerSpec as content. + + This field is a member of `oneof`_ ``template``. + parameters (MutableMapping[str, str]): + The parameters for FlexTemplate. Ex. {"num_workers":"5"} + launch_options (MutableMapping[str, str]): + Launch options for this flex template job. + This is a common set of options across languages + and templates. This should not be used to pass + job parameters. + environment (google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment): + The runtime environment for the FlexTemplate + job + update (bool): + Set this to true if you are sending a request + to update a running streaming job. When set, the + job name should be the same as the running job. + transform_name_mappings (MutableMapping[str, str]): + Use this to pass transform_name_mappings for streaming + update jobs. 
Ex:{"oldTransformName":"newTransformName",...}' + """ + + job_name: str = proto.Field( + proto.STRING, + number=1, + ) + container_spec: 'ContainerSpec' = proto.Field( + proto.MESSAGE, + number=4, + oneof='template', + message='ContainerSpec', + ) + container_spec_gcs_path: str = proto.Field( + proto.STRING, + number=5, + oneof='template', + ) + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + launch_options: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=6, + ) + environment: 'FlexTemplateRuntimeEnvironment' = proto.Field( + proto.MESSAGE, + number=7, + message='FlexTemplateRuntimeEnvironment', + ) + update: bool = proto.Field( + proto.BOOL, + number=8, + ) + transform_name_mappings: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + + +class FlexTemplateRuntimeEnvironment(proto.Message): + r"""The environment values to be set at runtime for flex + template. + + Attributes: + num_workers (int): + The initial number of Google Compute Engine + instances for the job. + max_workers (int): + The maximum number of Google Compute Engine + instances to be made available to your pipeline + during execution, from 1 to 1000. + zone (str): + The Compute Engine `availability + zone `__ + for launching worker instances to run your pipeline. In the + future, worker_zone will take precedence. + service_account_email (str): + The email address of the service account to + run the job as. + temp_location (str): + The Cloud Storage path to use for temporary files. Must be a + valid Cloud Storage URL, beginning with ``gs://``. + machine_type (str): + The machine type to use for the job. Defaults + to the value from the template if not specified. + additional_experiments (MutableSequence[str]): + Additional experiment flags for the job. + network (str): + Network to which VMs will be assigned. If + empty or unspecified, the service will use the + network "default". + subnetwork (str): + Subnetwork to which VMs will be assigned, if desired. You + can specify a subnetwork using either a complete URL or an + abbreviated path. Expected to be of the form + "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" + or "regions/REGION/subnetworks/SUBNETWORK". If the + subnetwork is located in a Shared VPC network, you must use + the complete URL. + additional_user_labels (MutableMapping[str, str]): + Additional user labels to be specified for the job. Keys and + values must follow the restrictions specified in the + `labeling + restrictions `__ + page. An object containing a list of "key": value pairs. + Example: { "name": "wrench", "mass": "1kg", "count": "3" }. + kms_key_name (str): + Name for the Cloud KMS key for the job. + Key format is: + projects//locations//keyRings//cryptoKeys/ + ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): + Configuration for VM IPs. + worker_region (str): + The Compute Engine region + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1". + Mutually exclusive with worker_zone. If neither + worker_region nor worker_zone is specified, default to the + control plane's region. + worker_zone (str): + The Compute Engine zone + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1-a". + Mutually exclusive with worker_region. 
If neither + worker_region nor worker_zone is specified, a zone in the + control plane's region is chosen based on available + capacity. If both ``worker_zone`` and ``zone`` are set, + ``worker_zone`` takes precedence. + enable_streaming_engine (bool): + Whether to enable Streaming Engine for the + job. + flexrs_goal (google.cloud.dataflow_v1beta3.types.FlexResourceSchedulingGoal): + Set FlexRS goal for the job. + https://cloud.google.com/dataflow/docs/guides/flexrs + staging_location (str): + The Cloud Storage path for staging local files. Must be a + valid Cloud Storage URL, beginning with ``gs://``. + sdk_container_image (str): + Docker registry location of container image + to use for the 'worker harness. Default is the + container for the version of the SDK. Note this + field is only valid for portable pipelines. + disk_size_gb (int): + Worker disk size, in gigabytes. + autoscaling_algorithm (google.cloud.dataflow_v1beta3.types.AutoscalingAlgorithm): + The algorithm to use for autoscaling + dump_heap_on_oom (bool): + If true, save a heap dump before killing a + thread or process which is GC thrashing or out + of memory. The location of the heap file will + either be echoed back to the user, or the user + will be given the opportunity to download the + heap file. + save_heap_dumps_to_gcs_path (str): + Cloud Storage bucket (directory) to upload heap dumps to the + given location. Enabling this implies that heap dumps should + be generated on OOM (dump_heap_on_oom is set to true). + launcher_machine_type (str): + The machine type to use for launching the + job. The default is n1-standard-1. + """ + + num_workers: int = proto.Field( + proto.INT32, + number=1, + ) + max_workers: int = proto.Field( + proto.INT32, + number=2, + ) + zone: str = proto.Field( + proto.STRING, + number=3, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=4, + ) + temp_location: str = proto.Field( + proto.STRING, + number=5, + ) + machine_type: str = proto.Field( + proto.STRING, + number=6, + ) + additional_experiments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + network: str = proto.Field( + proto.STRING, + number=8, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=9, + ) + additional_user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=11, + ) + ip_configuration: gd_environment.WorkerIPAddressConfiguration = proto.Field( + proto.ENUM, + number=12, + enum=gd_environment.WorkerIPAddressConfiguration, + ) + worker_region: str = proto.Field( + proto.STRING, + number=13, + ) + worker_zone: str = proto.Field( + proto.STRING, + number=14, + ) + enable_streaming_engine: bool = proto.Field( + proto.BOOL, + number=15, + ) + flexrs_goal: gd_environment.FlexResourceSchedulingGoal = proto.Field( + proto.ENUM, + number=16, + enum=gd_environment.FlexResourceSchedulingGoal, + ) + staging_location: str = proto.Field( + proto.STRING, + number=17, + ) + sdk_container_image: str = proto.Field( + proto.STRING, + number=18, + ) + disk_size_gb: int = proto.Field( + proto.INT32, + number=20, + ) + autoscaling_algorithm: gd_environment.AutoscalingAlgorithm = proto.Field( + proto.ENUM, + number=21, + enum=gd_environment.AutoscalingAlgorithm, + ) + dump_heap_on_oom: bool = proto.Field( + proto.BOOL, + number=22, + ) + save_heap_dumps_to_gcs_path: str = proto.Field( + proto.STRING, + number=23, + ) + launcher_machine_type: str = proto.Field( + proto.STRING, + 
number=24, + ) + + +class LaunchFlexTemplateRequest(proto.Message): + r"""A request to launch a Cloud Dataflow job from a FlexTemplate. + + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. + launch_parameter (google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateParameter): + Required. Parameter to launch a job form Flex + Template. + location (str): + Required. The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + to which to direct the request. E.g., us-central1, us-west1. + validate_only (bool): + If true, the request is validated but not + actually executed. Defaults to false. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + launch_parameter: 'LaunchFlexTemplateParameter' = proto.Field( + proto.MESSAGE, + number=2, + message='LaunchFlexTemplateParameter', + ) + location: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class RuntimeEnvironment(proto.Message): + r"""The environment values to set at runtime. + + Attributes: + num_workers (int): + The initial number of Google Compute Engine + instnaces for the job. + max_workers (int): + The maximum number of Google Compute Engine + instances to be made available to your pipeline + during execution, from 1 to 1000. + zone (str): + The Compute Engine `availability + zone `__ + for launching worker instances to run your pipeline. In the + future, worker_zone will take precedence. + service_account_email (str): + The email address of the service account to + run the job as. + temp_location (str): + The Cloud Storage path to use for temporary files. Must be a + valid Cloud Storage URL, beginning with ``gs://``. + bypass_temp_dir_validation (bool): + Whether to bypass the safety checks for the + job's temporary directory. Use with caution. + machine_type (str): + The machine type to use for the job. Defaults + to the value from the template if not specified. + additional_experiments (MutableSequence[str]): + Additional experiment flags for the job, specified with the + ``--experiments`` option. + network (str): + Network to which VMs will be assigned. If + empty or unspecified, the service will use the + network "default". + subnetwork (str): + Subnetwork to which VMs will be assigned, if desired. You + can specify a subnetwork using either a complete URL or an + abbreviated path. Expected to be of the form + "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" + or "regions/REGION/subnetworks/SUBNETWORK". If the + subnetwork is located in a Shared VPC network, you must use + the complete URL. + additional_user_labels (MutableMapping[str, str]): + Additional user labels to be specified for the job. Keys and + values should follow the restrictions specified in the + `labeling + restrictions `__ + page. An object containing a list of "key": value pairs. + Example: { "name": "wrench", "mass": "1kg", "count": "3" }. + kms_key_name (str): + Name for the Cloud KMS key for the job. + Key format is: + projects//locations//keyRings//cryptoKeys/ + ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): + Configuration for VM IPs. + worker_region (str): + The Compute Engine region + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1". + Mutually exclusive with worker_zone. 
If neither + worker_region nor worker_zone is specified, default to the + control plane's region. + worker_zone (str): + The Compute Engine zone + (https://cloud.google.com/compute/docs/regions-zones/regions-zones) + in which worker processing should occur, e.g. "us-west1-a". + Mutually exclusive with worker_region. If neither + worker_region nor worker_zone is specified, a zone in the + control plane's region is chosen based on available + capacity. If both ``worker_zone`` and ``zone`` are set, + ``worker_zone`` takes precedence. + enable_streaming_engine (bool): + Whether to enable Streaming Engine for the + job. + """ + + num_workers: int = proto.Field( + proto.INT32, + number=11, + ) + max_workers: int = proto.Field( + proto.INT32, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=3, + ) + temp_location: str = proto.Field( + proto.STRING, + number=4, + ) + bypass_temp_dir_validation: bool = proto.Field( + proto.BOOL, + number=5, + ) + machine_type: str = proto.Field( + proto.STRING, + number=6, + ) + additional_experiments: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) + network: str = proto.Field( + proto.STRING, + number=8, + ) + subnetwork: str = proto.Field( + proto.STRING, + number=9, + ) + additional_user_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=12, + ) + ip_configuration: gd_environment.WorkerIPAddressConfiguration = proto.Field( + proto.ENUM, + number=14, + enum=gd_environment.WorkerIPAddressConfiguration, + ) + worker_region: str = proto.Field( + proto.STRING, + number=15, + ) + worker_zone: str = proto.Field( + proto.STRING, + number=16, + ) + enable_streaming_engine: bool = proto.Field( + proto.BOOL, + number=17, + ) + + +class ParameterMetadata(proto.Message): + r"""Metadata for a specific parameter. + + Attributes: + name (str): + Required. The name of the parameter. + label (str): + Required. The label to display for the + parameter. + help_text (str): + Required. The help text to display for the + parameter. + is_optional (bool): + Optional. Whether the parameter is optional. + Defaults to false. + regexes (MutableSequence[str]): + Optional. Regexes that the parameter must + match. + param_type (google.cloud.dataflow_v1beta3.types.ParameterType): + Optional. The type of the parameter. + Used for selecting input picker. + custom_metadata (MutableMapping[str, str]): + Optional. Additional metadata for describing + this parameter. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + label: str = proto.Field( + proto.STRING, + number=2, + ) + help_text: str = proto.Field( + proto.STRING, + number=3, + ) + is_optional: bool = proto.Field( + proto.BOOL, + number=4, + ) + regexes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + param_type: 'ParameterType' = proto.Field( + proto.ENUM, + number=6, + enum='ParameterType', + ) + custom_metadata: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + + +class TemplateMetadata(proto.Message): + r"""Metadata describing a template. + + Attributes: + name (str): + Required. The name of the template. + description (str): + Optional. A description of the template. + parameters (MutableSequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata]): + The parameters for the template. 
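+
+    Example (an illustrative sketch only; the names and values are
+    hypothetical)::
+
+        metadata = dataflow_v1beta3.TemplateMetadata(
+            name="Word Count",
+            description="Counts words in a text file.",
+            parameters=[
+                dataflow_v1beta3.ParameterMetadata(
+                    name="inputFile",
+                    label="Input file",
+                    help_text="Cloud Storage path of the file to read.",
+                    param_type=dataflow_v1beta3.ParameterType.GCS_READ_FILE,
+                ),
+            ],
+        )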
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + parameters: MutableSequence['ParameterMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ParameterMetadata', + ) + + +class SDKInfo(proto.Message): + r"""SDK Information. + + Attributes: + language (google.cloud.dataflow_v1beta3.types.SDKInfo.Language): + Required. The SDK Language. + version (str): + Optional. The SDK version. + """ + class Language(proto.Enum): + r"""SDK Language.""" + UNKNOWN = 0 + JAVA = 1 + PYTHON = 2 + + language: Language = proto.Field( + proto.ENUM, + number=1, + enum=Language, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class RuntimeMetadata(proto.Message): + r"""RuntimeMetadata describing a runtime environment. + + Attributes: + sdk_info (google.cloud.dataflow_v1beta3.types.SDKInfo): + SDK Info for the template. + parameters (MutableSequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata]): + The parameters for the template. + """ + + sdk_info: 'SDKInfo' = proto.Field( + proto.MESSAGE, + number=1, + message='SDKInfo', + ) + parameters: MutableSequence['ParameterMetadata'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='ParameterMetadata', + ) + + +class CreateJobFromTemplateRequest(proto.Message): + r"""A request to create a Cloud Dataflow job from a template. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. + job_name (str): + Required. The job name to use for the created + job. + gcs_path (str): + Required. A Cloud Storage path to the template from which to + create the job. Must be a valid Cloud Storage URL, beginning + with ``gs://``. + + This field is a member of `oneof`_ ``template``. + parameters (MutableMapping[str, str]): + The runtime parameters to pass to the job. + environment (google.cloud.dataflow_v1beta3.types.RuntimeEnvironment): + The runtime environment for the job. + location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + to which to direct the request. + """ + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + job_name: str = proto.Field( + proto.STRING, + number=4, + ) + gcs_path: str = proto.Field( + proto.STRING, + number=2, + oneof='template', + ) + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + environment: 'RuntimeEnvironment' = proto.Field( + proto.MESSAGE, + number=5, + message='RuntimeEnvironment', + ) + location: str = proto.Field( + proto.STRING, + number=6, + ) + + +class GetTemplateRequest(proto.Message): + r"""A request to retrieve a Cloud Dataflow job template. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. + gcs_path (str): + Required. A Cloud Storage path to the + template from which to create the job. + Must be valid Cloud Storage URL, beginning with + 'gs://'. + + This field is a member of `oneof`_ ``template``. + view (google.cloud.dataflow_v1beta3.types.GetTemplateRequest.TemplateView): + The view to retrieve. Defaults to METADATA_ONLY. 
+ location (str): + The [regional endpoint] + (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) + to which to direct the request. + """ + class TemplateView(proto.Enum): + r"""The various views of a template that may be retrieved.""" + METADATA_ONLY = 0 + + project_id: str = proto.Field( + proto.STRING, + number=1, + ) + gcs_path: str = proto.Field( + proto.STRING, + number=2, + oneof='template', + ) + view: TemplateView = proto.Field( + proto.ENUM, + number=3, + enum=TemplateView, + ) + location: str = proto.Field( + proto.STRING, + number=4, + ) + + +class GetTemplateResponse(proto.Message): + r"""The response to a GetTemplate request. + + Attributes: + status (google.rpc.status_pb2.Status): + The status of the get template request. Any problems with + the request will be indicated in the error_details. + metadata (google.cloud.dataflow_v1beta3.types.TemplateMetadata): + The template metadata describing the template + name, available parameters, etc. + template_type (google.cloud.dataflow_v1beta3.types.GetTemplateResponse.TemplateType): + Template Type. + runtime_metadata (google.cloud.dataflow_v1beta3.types.RuntimeMetadata): + Describes the runtime metadata with SDKInfo + and available parameters. + """ + class TemplateType(proto.Enum): + r"""Template Type.""" + UNKNOWN = 0 + LEGACY = 1 + FLEX = 2 + + status: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + metadata: 'TemplateMetadata' = proto.Field( + proto.MESSAGE, + number=2, + message='TemplateMetadata', + ) + template_type: TemplateType = proto.Field( + proto.ENUM, + number=3, + enum=TemplateType, + ) + runtime_metadata: 'RuntimeMetadata' = proto.Field( + proto.MESSAGE, + number=4, + message='RuntimeMetadata', + ) + + +class LaunchTemplateParameters(proto.Message): + r"""Parameters to provide to the template being launched. + + Attributes: + job_name (str): + Required. The job name to use for the created + job. + parameters (MutableMapping[str, str]): + The runtime parameters to pass to the job. + environment (google.cloud.dataflow_v1beta3.types.RuntimeEnvironment): + The runtime environment for the job. + update (bool): + If set, replace the existing pipeline with + the name specified by jobName with this + pipeline, preserving state. + transform_name_mapping (MutableMapping[str, str]): + Only applicable when updating a pipeline. Map + of transform name prefixes of the job to be + replaced to the corresponding name prefixes of + the new job. + """ + + job_name: str = proto.Field( + proto.STRING, + number=1, + ) + parameters: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + environment: 'RuntimeEnvironment' = proto.Field( + proto.MESSAGE, + number=3, + message='RuntimeEnvironment', + ) + update: bool = proto.Field( + proto.BOOL, + number=4, + ) + transform_name_mapping: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class LaunchTemplateRequest(proto.Message): + r"""A request to launch a template. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project_id (str): + Required. The ID of the Cloud Platform + project that the job belongs to. 
+        validate_only (bool):
+            If true, the request is validated but not
+            actually executed. Defaults to false.
+        gcs_path (str):
+            A Cloud Storage path to the template from
+            which to create the job.
+            Must be a valid Cloud Storage URL, beginning
+            with 'gs://'.
+
+            This field is a member of `oneof`_ ``template``.
+        dynamic_template (google.cloud.dataflow_v1beta3.types.DynamicTemplateLaunchParams):
+            Params for launching a dynamic template.
+
+            This field is a member of `oneof`_ ``template``.
+        launch_parameters (google.cloud.dataflow_v1beta3.types.LaunchTemplateParameters):
+            The parameters of the template to launch.
+            This should be part of the body of the POST
+            request.
+        location (str):
+            The [regional endpoint]
+            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
+            to which to direct the request.
+    """
+
+    project_id: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    gcs_path: str = proto.Field(
+        proto.STRING,
+        number=3,
+        oneof='template',
+    )
+    dynamic_template: 'DynamicTemplateLaunchParams' = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof='template',
+        message='DynamicTemplateLaunchParams',
+    )
+    launch_parameters: 'LaunchTemplateParameters' = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message='LaunchTemplateParameters',
+    )
+    location: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class LaunchTemplateResponse(proto.Message):
+    r"""Response to the request to launch a template.
+
+    Attributes:
+        job (google.cloud.dataflow_v1beta3.types.Job):
+            The job that was launched, if the request was
+            not a dry run and the job was successfully
+            launched.
+    """
+
+    job: jobs.Job = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=jobs.Job,
+    )
+
+
+class InvalidTemplateParameters(proto.Message):
+    r"""Used in the error_details field of a google.rpc.Status message, this
+    indicates problems with the template parameter.
+
+    Attributes:
+        parameter_violations (MutableSequence[google.cloud.dataflow_v1beta3.types.InvalidTemplateParameters.ParameterViolation]):
+            Describes all parameter violations in a
+            template request.
+    """
+
+    class ParameterViolation(proto.Message):
+        r"""A specific template-parameter violation.
+
+        Attributes:
+            parameter (str):
+                The parameter that failed to validate.
+            description (str):
+                A description of why the parameter failed to
+                validate.
+        """
+
+        parameter: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        description: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+
+    parameter_violations: MutableSequence[ParameterViolation] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=ParameterViolation,
+    )
+
+
+class DynamicTemplateLaunchParams(proto.Message):
+    r"""Params which should be passed when launching a dynamic
+    template.
+
+    Attributes:
+        gcs_path (str):
+            Path to the dynamic template spec file on
+            Cloud Storage. The file must be a
+            JSON-serialized ``DynamicTemplateFileSpec``
+            object.
+        staging_location (str):
+            Cloud Storage path for staging dependencies. Must be a valid
+            Cloud Storage URL, beginning with ``gs://``.
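+
+    Example (an illustrative sketch only; bucket and project names are
+    hypothetical)::
+
+        request = dataflow_v1beta3.LaunchTemplateRequest(
+            project_id="my-project",
+            location="us-central1",
+            dynamic_template=dataflow_v1beta3.DynamicTemplateLaunchParams(
+                gcs_path="gs://my-bucket/templates/spec.json",
+                staging_location="gs://my-bucket/staging",
+            ),
+            launch_parameters=dataflow_v1beta3.LaunchTemplateParameters(
+                job_name="my-job",
+            ),
+        )
+        response = dataflow_v1beta3.TemplatesServiceClient().launch_template(
+            request=request,
+        )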
+ """ + + gcs_path: str = proto.Field( + proto.STRING, + number=1, + ) + staging_location: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/mypy.ini b/owl-bot-staging/v1beta3/mypy.ini new file mode 100644 index 0000000..574c5ae --- /dev/null +++ b/owl-bot-staging/v1beta3/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1beta3/noxfile.py b/owl-bot-staging/v1beta3/noxfile.py new file mode 100644 index 0000000..2951d2e --- /dev/null +++ b/owl-bot-staging/v1beta3/noxfile.py @@ -0,0 +1,183 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.10" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/dataflow_v1beta3/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py new file mode 100644 index 0000000..0a4fc7b --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchFlexTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_launch_flex_template(): + # Create a client + client = dataflow_v1beta3.FlexTemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchFlexTemplateRequest( + ) + + # Make the request + response = await client.launch_flex_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py new file mode 100644 index 0000000..8f236ed --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchFlexTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_launch_flex_template(): + # Create a client + client = dataflow_v1beta3.FlexTemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchFlexTemplateRequest( + ) + + # Make the request + response = client.launch_flex_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py new file mode 100644 index 0000000..701ae61 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
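+#   As an illustrative sketch only (hypothetical values; field and enum
+#   names as defined in the v1beta3 protos), a populated request might
+#   look like:
+#
+#       request = dataflow_v1beta3.ListJobsRequest(
+#           project_id="my-project",
+#           filter=dataflow_v1beta3.ListJobsRequest.Filter.ACTIVE,
+#       )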
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataflow_v1beta3
+
+
+async def sample_aggregated_list_jobs():
+    # Create a client
+    client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+
+    # Initialize request argument(s)
+    request = dataflow_v1beta3.ListJobsRequest(
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.aggregated_list_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py
new file mode 100644
index 0000000..7b9946e
--- /dev/null
+++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for AggregatedListJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataflow-client
+
+
+# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_aggregated_list_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobsRequest( + ) + + # Make the request + page_result = client.aggregated_list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py new file mode 100644 index 0000000..eb9e815 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckActiveJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
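+#   As an illustrative sketch only (hypothetical project ID), the request
+#   might be populated as:
+#
+#       request = dataflow_v1beta3.CheckActiveJobsRequest(
+#           project_id="my-project",
+#       )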
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_check_active_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CheckActiveJobsRequest( + ) + + # Make the request + response = await client.check_active_jobs(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py new file mode 100644 index 0000000..591caa0 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CheckActiveJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_check_active_jobs(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CheckActiveJobsRequest( + ) + + # Make the request + response = client.check_active_jobs(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py new file mode 100644 index 0000000..aa6081a --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_create_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobRequest( + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py new file mode 100644 index 0000000..b91dc91 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
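+#   As an illustrative sketch only (hypothetical values; the Job message
+#   carries many more fields), a populated request might look like:
+#
+#       request = dataflow_v1beta3.CreateJobRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           job=dataflow_v1beta3.Job(name="my-job"),
+#       )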
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_create_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobRequest( + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py new file mode 100644 index 0000000..addc3ab --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_get_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobRequest( + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py new file mode 100644 index 0000000..c4bef41 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobRequest( + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py new file mode 100644 index 0000000..c4084d0 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
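+#   As an illustrative sketch only (hypothetical values), a region-scoped
+#   request might look like:
+#
+#       request = dataflow_v1beta3.ListJobsRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#       )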
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataflow_v1beta3
+
+
+async def sample_list_jobs():
+    # Create a client
+    client = dataflow_v1beta3.JobsV1Beta3AsyncClient()
+
+    # Initialize request argument(s)
+    request = dataflow_v1beta3.ListJobsRequest(
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.list_jobs(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py
new file mode 100644
index 0000000..a975672
--- /dev/null
+++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListJobs
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataflow-client
+
+
+# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataflow_v1beta3
+
+
+def sample_list_jobs():
+    # Create a client
+    client = dataflow_v1beta3.JobsV1Beta3Client()
+
+    # Initialize request argument(s)
+    request = dataflow_v1beta3.ListJobsRequest(
+    )
+
+    # Make the request
+    page_result = client.list_jobs(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py
new file mode 100644
index 0000000..7be3e3f
--- /dev/null
+++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py
@@ -0,0 +1,51 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SnapshotJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_snapshot_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.SnapshotJobRequest( + ) + + # Make the request + response = await client.snapshot_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py new file mode 100644 index 0000000..db5e822 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SnapshotJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
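+#   As an illustrative sketch only (hypothetical values), a populated
+#   request might look like:
+#
+#       request = dataflow_v1beta3.SnapshotJobRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           job_id="my-job-id",
+#       )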
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_snapshot_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.SnapshotJobRequest( + ) + + # Make the request + response = client.snapshot_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py new file mode 100644 index 0000000..a5b58a1 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_update_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.UpdateJobRequest( + ) + + # Make the request + response = await client.update_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py new file mode 100644 index 0000000..9fff7ed --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_update_job(): + # Create a client + client = dataflow_v1beta3.JobsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.UpdateJobRequest( + ) + + # Make the request + response = client.update_job(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py new file mode 100644 index 0000000..8f449c0 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobMessages +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
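+#   As an illustrative sketch only (hypothetical values), a populated
+#   request might look like:
+#
+#       request = dataflow_v1beta3.ListJobMessagesRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           job_id="my-job-id",
+#       )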
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataflow_v1beta3
+
+
+async def sample_list_job_messages():
+    # Create a client
+    client = dataflow_v1beta3.MessagesV1Beta3AsyncClient()
+
+    # Initialize request argument(s)
+    request = dataflow_v1beta3.ListJobMessagesRequest(
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.list_job_messages(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py
new file mode 100644
index 0000000..256bde8
--- /dev/null
+++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListJobMessages
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataflow-client
+
+
+# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_job_messages(): + # Create a client + client = dataflow_v1beta3.MessagesV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListJobMessagesRequest( + ) + + # Make the request + page_result = client.list_job_messages(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py new file mode 100644 index 0000000..97150ab --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobExecutionDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
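+#   As an illustrative sketch only (hypothetical values), a populated
+#   request might look like:
+#
+#       request = dataflow_v1beta3.GetJobExecutionDetailsRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           job_id="my-job-id",
+#       )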
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataflow_v1beta3
+
+
+async def sample_get_job_execution_details():
+    # Create a client
+    client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
+
+    # Initialize request argument(s)
+    request = dataflow_v1beta3.GetJobExecutionDetailsRequest(
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.get_job_execution_details(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py
new file mode 100644
index 0000000..9268495
--- /dev/null
+++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetJobExecutionDetails
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataflow-client
+
+
+# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_job_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py new file mode 100644 index 0000000..c285799 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
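+#   As an illustrative sketch only (hypothetical values), a populated
+#   request might look like:
+#
+#       request = dataflow_v1beta3.GetJobMetricsRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           job_id="my-job-id",
+#       )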
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_get_job_metrics(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobMetricsRequest( + ) + + # Make the request + response = await client.get_job_metrics(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py new file mode 100644 index 0000000..bbe9622 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_job_metrics(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetJobMetricsRequest( + ) + + # Make the request + response = client.get_job_metrics(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py new file mode 100644 index 0000000..431fc92 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetStageExecutionDetails +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
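+#   As an illustrative sketch only (all values hypothetical, including the
+#   stage identifier), a populated request might look like:
+#
+#       request = dataflow_v1beta3.GetStageExecutionDetailsRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           job_id="my-job-id",
+#           stage_id="my-stage-id",
+#       )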
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import dataflow_v1beta3
+
+
+async def sample_get_stage_execution_details():
+    # Create a client
+    client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
+
+    # Initialize request argument(s)
+    request = dataflow_v1beta3.GetStageExecutionDetailsRequest(
+    )
+
+    # Make the request (the async call must be awaited to obtain the pager)
+    page_result = await client.get_stage_execution_details(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py
new file mode 100644
index 0000000..c9e9729
--- /dev/null
+++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for GetStageExecutionDetails
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataflow-client
+
+
+# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_stage_execution_details(): + # Create a client + client = dataflow_v1beta3.MetricsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetStageExecutionDetailsRequest( + ) + + # Make the request + page_result = client.get_stage_execution_details(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py new file mode 100644 index 0000000..a3d83e7 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
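+#   As an illustrative sketch only (hypothetical values), a populated
+#   request might look like:
+#
+#       request = dataflow_v1beta3.DeleteSnapshotRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           snapshot_id="my-snapshot-id",
+#       )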
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_delete_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.DeleteSnapshotRequest( + ) + + # Make the request + response = await client.delete_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py new file mode 100644 index 0000000..59a50f5 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_delete_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.DeleteSnapshotRequest( + ) + + # Make the request + response = client.delete_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py new file mode 100644 index 0000000..b95e491 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
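+#   As an illustrative sketch only (hypothetical values), a populated
+#   request might look like:
+#
+#       request = dataflow_v1beta3.GetSnapshotRequest(
+#           project_id="my-project",
+#           location="us-central1",
+#           snapshot_id="my-snapshot-id",
+#       )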
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_get_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetSnapshotRequest( + ) + + # Make the request + response = await client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py new file mode 100644 index 0000000..f1861f6 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
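+#   For example, the returned Snapshot's state could be checked before use
+#   (enum and field names assumed from the v1beta3 types, not verified here):
+#       response = client.get_snapshot(request=request)
+#       if response.state == dataflow_v1beta3.SnapshotState.READY:  # assumed enum
+#           print("snapshot is ready")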
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_snapshot(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetSnapshotRequest( + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py new file mode 100644 index 0000000..e8303a0 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
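+#   For example (field names assumed from the v1beta3 ListSnapshotsRequest,
+#   values illustrative; job_id narrows the listing to one job's snapshots):
+#       request = dataflow_v1beta3.ListSnapshotsRequest(
+#           project_id="my-project",  # assumed field name
+#           location="us-central1",   # assumed field name
+#           job_id="my-job-id",       # assumed optional filter field
+#       )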
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_list_snapshots(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListSnapshotsRequest( + ) + + # Make the request + response = await client.list_snapshots(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py new file mode 100644 index 0000000..bdd2529 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
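+#   For example, the individual snapshots could be read off the response
+#   (the `snapshots` field is assumed from the v1beta3 ListSnapshotsResponse):
+#       response = client.list_snapshots(request=request)
+#       for snapshot in response.snapshots:  # assumed repeated field
+#           print(snapshot.id)               # assumed field name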
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_list_snapshots(): + # Create a client + client = dataflow_v1beta3.SnapshotsV1Beta3Client() + + # Initialize request argument(s) + request = dataflow_v1beta3.ListSnapshotsRequest( + ) + + # Make the request + response = client.list_snapshots(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py new file mode 100644 index 0000000..e21cec8 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobFromTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
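+#   For example, beyond the required gcs_path, a working request typically
+#   needs project and job details (field names assumed from the v1beta3
+#   CreateJobFromTemplateRequest, values illustrative):
+#       request = dataflow_v1beta3.CreateJobFromTemplateRequest(
+#           project_id="my-project",                            # assumed field
+#           location="us-central1",                             # assumed field
+#           job_name="my-job",                                  # assumed field
+#           gcs_path="gs://my-bucket/templates/my-template",    # illustrative path
+#           parameters={"inputFile": "gs://my-bucket/in.txt"},  # assumed field
+#       )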
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_create_job_from_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobFromTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = await client.create_job_from_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py new file mode 100644 index 0000000..175c180 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobFromTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
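+#   For example, runtime settings such as a temp location could be attached
+#   via the request's environment (type and field names assumed from the
+#   v1beta3 RuntimeEnvironment, values illustrative):
+#       request = dataflow_v1beta3.CreateJobFromTemplateRequest(
+#           gcs_path="gs://my-bucket/templates/my-template",
+#           environment=dataflow_v1beta3.RuntimeEnvironment(
+#               temp_location="gs://my-bucket/tmp",  # assumed field name
+#           ),
+#       )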
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_create_job_from_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.CreateJobFromTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.create_job_from_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py new file mode 100644 index 0000000..8760665 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
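+#   For example (field names assumed from the v1beta3 GetTemplateRequest,
+#   values illustrative):
+#       request = dataflow_v1beta3.GetTemplateRequest(
+#           project_id="my-project",                          # assumed field
+#           location="us-central1",                           # assumed field
+#           gcs_path="gs://my-bucket/templates/my-template",  # illustrative path
+#       )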
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_get_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = await client.get_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py new file mode 100644 index 0000000..c7f9cae --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
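+#   For example, the template's declared parameters could be read off the
+#   response (field names assumed from the v1beta3 GetTemplateResponse and
+#   TemplateMetadata, not verified here):
+#       response = client.get_template(request=request)
+#       for param in response.metadata.parameters:  # assumed fields
+#           print(param.name)                       # assumed field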
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_get_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.GetTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.get_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py new file mode 100644 index 0000000..5d00450 --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
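+#   For example, the job to launch is described via launch_parameters (type
+#   and field names assumed from the v1beta3 LaunchTemplateRequest and
+#   LaunchTemplateParameters, values illustrative):
+#       request = dataflow_v1beta3.LaunchTemplateRequest(
+#           project_id="my-project",                          # assumed field
+#           location="us-central1",                           # assumed field
+#           gcs_path="gs://my-bucket/templates/my-template",  # illustrative path
+#           launch_parameters=dataflow_v1beta3.LaunchTemplateParameters(
+#               job_name="my-job",                            # assumed field
+#           ),
+#       )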
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +async def sample_launch_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceAsyncClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = await client.launch_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py new file mode 100644 index 0000000..7f1f81d --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for LaunchTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataflow-client + + +# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
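+#   For example, a dry run that validates the request without starting a job
+#   could be attempted with validate_only (field name assumed from the
+#   v1beta3 LaunchTemplateRequest):
+#       request = dataflow_v1beta3.LaunchTemplateRequest(
+#           gcs_path="gs://my-bucket/templates/my-template",
+#           validate_only=True,  # assumed field name
+#       )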
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataflow_v1beta3 + + +def sample_launch_template(): + # Create a client + client = dataflow_v1beta3.TemplatesServiceClient() + + # Initialize request argument(s) + request = dataflow_v1beta3.LaunchTemplateRequest( + gcs_path="gcs_path_value", + ) + + # Make the request + response = client.launch_template(request=request) + + # Handle the response + print(response) + +# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json b/owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json new file mode 100644 index 0000000..07ae5fa --- /dev/null +++ b/owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json @@ -0,0 +1,2769 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.dataflow.v1beta3", + "version": "v1beta3" + } + ], + "language": "PYTHON", + "name": "google-cloud-dataflow-client", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceAsyncClient", + "shortName": "FlexTemplatesServiceAsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceAsyncClient.launch_flex_template", + "method": { + "fullName": "google.dataflow.v1beta3.FlexTemplatesService.LaunchFlexTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.FlexTemplatesService", + "shortName": "FlexTemplatesService" + }, + "shortName": "LaunchFlexTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse", + "shortName": "launch_flex_template" + }, + "description": "Sample for LaunchFlexTemplate", + "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient", + "shortName": "FlexTemplatesServiceClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient.launch_flex_template", + "method": { + "fullName": "google.dataflow.v1beta3.FlexTemplatesService.LaunchFlexTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.FlexTemplatesService", + "shortName": "FlexTemplatesService" + }, + "shortName": 
"LaunchFlexTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse", + "shortName": "launch_flex_template" + }, + "description": "Sample for LaunchFlexTemplate", + "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.aggregated_list_jobs", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "AggregatedListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager", + "shortName": "aggregated_list_jobs" + }, + "description": "Sample for AggregatedListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.aggregated_list_jobs", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "AggregatedListJobs" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataflow_v1beta3.types.ListJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager", + "shortName": "aggregated_list_jobs" + }, + "description": "Sample for AggregatedListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.check_active_jobs", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "CheckActiveJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": "check_active_jobs" + }, + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "CheckActiveJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", + "shortName": "check_active_jobs" + }, + "description": "Sample for CheckActiveJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.create_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "CreateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job" + }, + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "CreateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job" + }, + "description": "Sample for CreateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.get_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 
48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.list_jobs", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.snapshot_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "SnapshotJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" + }, + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "SnapshotJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "snapshot_job" + }, + "description": "Sample for SnapshotJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", + "shortName": "JobsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.update_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "UpdateJob" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" + }, + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", + "shortName": "JobsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", + "method": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", + "service": { + "fullName": "google.dataflow.v1beta3.JobsV1Beta3", + "shortName": "JobsV1Beta3" + }, + "shortName": "UpdateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "update_job" + }, + "description": "Sample for UpdateJob", + "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient", + "shortName": "MessagesV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", + "shortName": "MessagesV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", + "method": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", + "service": { + "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", + "shortName": "MessagesV1Beta3" + }, + "shortName": "ListJobMessages" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", + "shortName": "list_job_messages" + }, + "description": "Sample for ListJobMessages", + "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", + "shortName": "get_job_execution_details" + }, + "description": "Sample for GetJobExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetJobMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", + "shortName": "get_job_metrics" + }, + "description": "Sample for GetJobMetrics", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", + "shortName": "MetricsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for 
GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", + "shortName": "MetricsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", + "method": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", + "service": { + "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", + "shortName": "MetricsV1Beta3" + }, + "shortName": "GetStageExecutionDetails" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", + "shortName": "get_stage_execution_details" + }, + "description": "Sample for GetStageExecutionDetails", + "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.delete_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" + }, + "description": "Sample for 
DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.get_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", + "shortName": "SnapshotsV1Beta3AsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.list_snapshots", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", + "shortName": "SnapshotsV1Beta3Client" + }, + "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", + "method": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", + "service": { + "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", + "shortName": "SnapshotsV1Beta3" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.create_job_from_template", + "method": { + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" + }, + "shortName": "CreateJobFromTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" + }, + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", + "method": { + "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" + }, + "shortName": "CreateJobFromTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.Job", + "shortName": "create_job_from_template" + }, + "description": "Sample for CreateJobFromTemplate", + "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.get_template", + "method": { + "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" + }, + "shortName": "GetTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", + "shortName": "get_template" + }, + "description": "Sample for GetTemplate", + "file": "dataflow_v1beta3_generated_templates_service_get_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataflow_v1beta3_generated_templates_service_get_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", + "method": { + "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" + }, + "shortName": "GetTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", + "shortName": "get_template" + }, + "description": "Sample for GetTemplate", + "file": "dataflow_v1beta3_generated_templates_service_get_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_templates_service_get_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", + "shortName": "TemplatesServiceAsyncClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.launch_template", + "method": { + "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" + }, + "shortName": "LaunchTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse", + "shortName": "launch_template" + }, + "description": "Sample for LaunchTemplate", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_templates_service_launch_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.dataflow_v1beta3.TemplatesServiceClient", + "shortName": "TemplatesServiceClient" + }, + "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.launch_template", + "method": { + "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", + "service": { + "fullName": "google.dataflow.v1beta3.TemplatesService", + "shortName": "TemplatesService" + }, + "shortName": "LaunchTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse", + "shortName": "launch_template" + }, + "description": "Sample for LaunchTemplate", + "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py b/owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py new file mode 100644 index 0000000..8afa679 --- /dev/null +++ b/owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py @@ -0,0 +1,193 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dataflowCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'aggregated_list_jobs': ('filter', 'project_id', 'view', 'page_size', 'page_token', 'location', ), + 'check_active_jobs': ('project_id', ), + 'create_job': ('project_id', 'job', 'view', 'replace_job_id', 'location', ), + 'create_job_from_template': ('project_id', 'job_name', 'gcs_path', 'parameters', 'environment', 'location', ), + 'delete_snapshot': ('project_id', 'snapshot_id', 'location', ), + 'get_job': ('project_id', 'job_id', 'view', 'location', ), + 'get_job_execution_details': ('project_id', 'job_id', 'location', 'page_size', 'page_token', ), + 'get_job_metrics': ('project_id', 'job_id', 'start_time', 'location', ), + 'get_snapshot': ('project_id', 'snapshot_id', 'location', ), + 'get_stage_execution_details': ('project_id', 'job_id', 'location', 'stage_id', 'page_size', 'page_token', 'start_time', 'end_time', ), + 'get_template': ('project_id', 'gcs_path', 'view', 'location', ), + 'launch_flex_template': ('project_id', 'launch_parameter', 'location', 'validate_only', ), + 'launch_template': ('project_id', 'validate_only', 'gcs_path', 'dynamic_template', 'launch_parameters', 'location', ), + 'list_job_messages': ('project_id', 'job_id', 'minimum_importance', 'page_size', 'page_token', 'start_time', 'end_time', 'location', ), + 'list_jobs': ('filter', 'project_id', 'view', 'page_size', 'page_token', 'location', ), + 'list_snapshots': ('project_id', 'job_id', 'location', ), + 'snapshot_job': ('project_id', 'job_id', 'ttl', 'location', 'snapshot_sources', 'description', ), + 'update_job': ('project_id', 'job_id', 'job', 'location', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dataflowCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dataflow client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool makes a best-effort attempt at converting positional + parameters in client method calls to keyword-based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool may also produce false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta3/setup.py b/owl-bot-staging/v1beta3/setup.py new file mode 100644 index 0000000..e5f5b42 --- /dev/null +++ b/owl-bot-staging/v1beta3/setup.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
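
For orientation, the fixup script defined above rewrites call sites from the old positionally flattened surface into the request-object surface, consulting METHOD_TO_PARAMS to map positional arguments onto request field names; retry, timeout, and metadata are preserved as control keyword arguments. An illustrative before/after, with placeholder project and job IDs:

    # Before: positional arguments on the flattened surface.
    client.get_job("my-project", "job-123")

    # After: arguments packed into a single request dict, following
    # METHOD_TO_PARAMS['get_job'] = ('project_id', 'job_id', 'view', 'location').
    client.get_job(request={'project_id': "my-project", 'job_id': "job-123"})

The script is run against an input tree and an existing, empty output directory, e.g. python fixup_dataflow_v1beta3_keywords.py --input-directory ./src --output-directory ./fixed, where both paths are placeholders.
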
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-dataflow-client' + + +description = "Google Cloud Dataflow Client API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/dataflow/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-dataflow-client" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.10.txt b/owl-bot-staging/v1beta3/testing/constraints-3.10.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1beta3/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.11.txt b/owl-bot-staging/v1beta3/testing/constraints-3.11.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1beta3/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.7.txt b/owl-bot-staging/v1beta3/testing/constraints-3.7.txt new file mode 100644 index 0000000..6c44adf --- /dev/null +++ b/owl-bot-staging/v1beta3/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.8.txt b/owl-bot-staging/v1beta3/testing/constraints-3.8.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1beta3/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.9.txt b/owl-bot-staging/v1beta3/testing/constraints-3.9.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1beta3/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1beta3/tests/__init__.py b/owl-bot-staging/v1beta3/tests/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta3/tests/unit/__init__.py b/owl-bot-staging/v1beta3/tests/unit/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py new file mode 100644 index 0000000..c696e63 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py @@ -0,0 +1,1459 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
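
The unit tests that follow exercise client construction paths (transport selection, mTLS endpoint resolution, credentials handling) rather than live RPCs. For context, the construction pattern they probe looks roughly like this sketch; the endpoint value is the same placeholder the tests use, and anonymous credentials keep everything offline:

    from google.api_core import client_options
    from google.auth import credentials as ga_credentials
    from google.cloud.dataflow_v1beta3 import FlexTemplatesServiceClient

    # Point the client at an explicit (fake) endpoint instead of
    # dataflow.googleapis.com, as the client_options tests do.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    client = FlexTemplatesServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=options,
    )
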
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataflow_v1beta3.services.flex_templates_service import FlexTemplatesServiceAsyncClient +from google.cloud.dataflow_v1beta3.services.flex_templates_service import FlexTemplatesServiceClient +from google.cloud.dataflow_v1beta3.services.flex_templates_service import transports +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import templates +from google.oauth2 import service_account +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert FlexTemplatesServiceClient._get_default_mtls_endpoint(None) is None + assert FlexTemplatesServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert FlexTemplatesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert FlexTemplatesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert FlexTemplatesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert FlexTemplatesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FlexTemplatesServiceClient, "grpc"), + (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), + (FlexTemplatesServiceClient, "rest"), +]) +def test_flex_templates_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.FlexTemplatesServiceGrpcTransport, "grpc"), + (transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.FlexTemplatesServiceRestTransport, "rest"), +]) +def test_flex_templates_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (FlexTemplatesServiceClient, "grpc"), + (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), + (FlexTemplatesServiceClient, "rest"), +]) +def test_flex_templates_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert 
client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +def test_flex_templates_service_client_get_transport_class(): + transport = FlexTemplatesServiceClient.get_transport_class() + available_transports = [ + transports.FlexTemplatesServiceGrpcTransport, + transports.FlexTemplatesServiceRestTransport, + ] + assert transport in available_transports + + transport = FlexTemplatesServiceClient.get_transport_class("grpc") + assert transport == transports.FlexTemplatesServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc"), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest"), +]) +@mock.patch.object(FlexTemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceClient)) +@mock.patch.object(FlexTemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceAsyncClient)) +def test_flex_templates_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(FlexTemplatesServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(FlexTemplatesServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", "true"), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", "false"), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest", "true"), + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(FlexTemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceClient)) +@mock.patch.object(FlexTemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_flex_templates_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient +]) +@mock.patch.object(FlexTemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceClient)) +@mock.patch.object(FlexTemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceAsyncClient)) +def test_flex_templates_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc"), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest"), +]) +def test_flex_templates_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", grpc_helpers), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest", None), +]) +def test_flex_templates_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_flex_templates_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = FlexTemplatesServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", grpc_helpers), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_flex_templates_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=None, + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + templates.LaunchFlexTemplateRequest, + dict, +]) +def test_launch_flex_template(request_type, transport: str = 'grpc'): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.launch_flex_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = templates.LaunchFlexTemplateResponse( + ) + response = client.launch_flex_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == templates.LaunchFlexTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, templates.LaunchFlexTemplateResponse) + + +def test_launch_flex_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.launch_flex_template), + '__call__') as call: + client.launch_flex_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == templates.LaunchFlexTemplateRequest() + +@pytest.mark.asyncio +async def test_launch_flex_template_async(transport: str = 'grpc_asyncio', request_type=templates.LaunchFlexTemplateRequest): + client = FlexTemplatesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.launch_flex_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchFlexTemplateResponse(
+        ))
+        response = await client.launch_flex_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.LaunchFlexTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, templates.LaunchFlexTemplateResponse)
+
+
+@pytest.mark.asyncio
+async def test_launch_flex_template_async_from_dict():
+    await test_launch_flex_template_async(request_type=dict)
+
+
+def test_launch_flex_template_field_headers():
+    client = FlexTemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = templates.LaunchFlexTemplateRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.launch_flex_template),
+            '__call__') as call:
+        call.return_value = templates.LaunchFlexTemplateResponse()
+        client.launch_flex_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_launch_flex_template_field_headers_async():
+    client = FlexTemplatesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = templates.LaunchFlexTemplateRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.launch_flex_template),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchFlexTemplateResponse())
+        await client.launch_flex_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.parametrize("request_type", [
+    templates.LaunchFlexTemplateRequest,
+    dict,
+])
+def test_launch_flex_template_rest(request_type):
+    client = FlexTemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = templates.LaunchFlexTemplateResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = templates.LaunchFlexTemplateResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.launch_flex_template(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, templates.LaunchFlexTemplateResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_launch_flex_template_rest_interceptors(null_interceptor):
+    transport = transports.FlexTemplatesServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.FlexTemplatesServiceRestInterceptor(),
+    )
+    client = FlexTemplatesServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.FlexTemplatesServiceRestInterceptor, "post_launch_flex_template") as post, \
+         mock.patch.object(transports.FlexTemplatesServiceRestInterceptor, "pre_launch_flex_template") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = templates.LaunchFlexTemplateRequest.pb(templates.LaunchFlexTemplateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = templates.LaunchFlexTemplateResponse.to_json(templates.LaunchFlexTemplateResponse())
+
+        request = templates.LaunchFlexTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = templates.LaunchFlexTemplateResponse()
+
+        client.launch_flex_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_launch_flex_template_rest_bad_request(transport: str = 'rest', request_type=templates.LaunchFlexTemplateRequest):
+    client = FlexTemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.launch_flex_template(request)
+
+
+def test_launch_flex_template_rest_error():
+    client = FlexTemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
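+
+
+# A minimal usage sketch (illustration only, not generated code): a custom REST
+# interceptor can be wired in by subclassing FlexTemplatesServiceRestInterceptor and
+# overriding the same pre/post hooks that the interceptor test above patches. The
+# _example* names are hypothetical.
+class _ExampleInterceptor(transports.FlexTemplatesServiceRestInterceptor):
+    def pre_launch_flex_template(self, request, metadata):
+        # Inspect or rewrite the request and metadata before the HTTP call.
+        return request, metadata
+
+    def post_launch_flex_template(self, response):
+        # Inspect the response after the HTTP call.
+        return response
+
+
+def _example_client_with_interceptor():
+    transport = transports.FlexTemplatesServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=_ExampleInterceptor(),
+    )
+    return FlexTemplatesServiceClient(transport=transport)
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.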
+ transport = transports.FlexTemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.FlexTemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FlexTemplatesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.FlexTemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FlexTemplatesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = FlexTemplatesServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.FlexTemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = FlexTemplatesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.FlexTemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = FlexTemplatesServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.FlexTemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.FlexTemplatesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.FlexTemplatesServiceGrpcTransport, + transports.FlexTemplatesServiceGrpcAsyncIOTransport, + transports.FlexTemplatesServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = FlexTemplatesServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = FlexTemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.FlexTemplatesServiceGrpcTransport, + ) + +def test_flex_templates_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.FlexTemplatesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_flex_templates_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.FlexTemplatesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'launch_flex_template', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_flex_templates_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FlexTemplatesServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id="octopus", + ) + + +def test_flex_templates_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.FlexTemplatesServiceTransport() + adc.assert_called_once() + + +def test_flex_templates_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        FlexTemplatesServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.FlexTemplatesServiceGrpcTransport,
+        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_flex_templates_service_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.FlexTemplatesServiceGrpcTransport,
+        transports.FlexTemplatesServiceGrpcAsyncIOTransport,
+        transports.FlexTemplatesServiceRestTransport,
+    ],
+)
+def test_flex_templates_service_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.FlexTemplatesServiceGrpcTransport, grpc_helpers),
+        (transports.FlexTemplatesServiceGrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_flex_templates_service_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=["1", "2"], + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport]) +def test_flex_templates_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_flex_templates_service_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.FlexTemplatesServiceRestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_flex_templates_service_host_no_port(transport_name):
+    client = FlexTemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataflow.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataflow.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_flex_templates_service_host_with_port(transport_name):
+    client = FlexTemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataflow.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataflow.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_flex_templates_service_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = FlexTemplatesServiceClient(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = FlexTemplatesServiceClient(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.launch_flex_template._session
+    session2 = client2.transport.launch_flex_template._session
+    assert session1 != session2
+
+
+def test_flex_templates_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.FlexTemplatesServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
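+
+
+# A minimal usage sketch (illustration only, not generated code): callers who need
+# full control over the gRPC channel can build one themselves and hand it to the
+# transport, which is the path the two channel tests around this note exercise.
+# The endpoint and credentials below are placeholders.
+def _example_client_with_custom_channel():
+    channel = grpc.secure_channel('dataflow.googleapis.com:443', grpc.ssl_channel_credentials())
+    transport = transports.FlexTemplatesServiceGrpcTransport(channel=channel)
+    return FlexTemplatesServiceClient(transport=transport)
+
+
+def test_flex_templates_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.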
+    transport = transports.FlexTemplatesServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport])
+def test_flex_templates_service_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
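+
+
+# A minimal sketch (illustration only, not generated code): the non-deprecated way to
+# configure client-side mTLS is the client_cert_source_for_mtls argument asserted in
+# the tests above, rather than the api_mtls_endpoint/client_cert_source constructor
+# arguments that the two deprecation tests around this note cover. The callback must
+# return real PEM-encoded cert and key bytes.
+def _example_mtls_transport():
+    return transports.FlexTemplatesServiceGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_cert_source_for_mtls=client_cert_source_callback,
+    )
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.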
+@pytest.mark.parametrize("transport_class", [transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport]) +def test_flex_templates_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = FlexTemplatesServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = FlexTemplatesServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = FlexTemplatesServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = FlexTemplatesServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = FlexTemplatesServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = FlexTemplatesServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = FlexTemplatesServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = FlexTemplatesServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = FlexTemplatesServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = FlexTemplatesServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = FlexTemplatesServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+    actual = FlexTemplatesServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = FlexTemplatesServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = FlexTemplatesServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = FlexTemplatesServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.FlexTemplatesServiceTransport, '_prep_wrapped_messages') as prep:
+        client = FlexTemplatesServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.FlexTemplatesServiceTransport, '_prep_wrapped_messages') as prep:
+        transport_class = FlexTemplatesServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = FlexTemplatesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = FlexTemplatesServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
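+
+# A minimal usage sketch (illustration only, not generated code): clients are context
+# managers, so the underlying transport is closed automatically on exit, which is the
+# behavior the close/ctx tests here verify.
+def _example_context_manager_usage():
+    with FlexTemplatesServiceClient(credentials=ga_credentials.AnonymousCredentials()) as client:
+        # ... issue RPCs via `client` inside the block ...
+        pass
+    # On exiting the block, client.transport has been closed.
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = FlexTemplatesServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.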
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport), + (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py new file mode 100644 index 0000000..fd61443 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py @@ -0,0 +1,3644 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3AsyncClient +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3Client +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers +from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import transports +from google.cloud.dataflow_v1beta3.types import environment +from google.cloud.dataflow_v1beta3.types import jobs +from google.cloud.dataflow_v1beta3.types import snapshots +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert JobsV1Beta3Client._get_default_mtls_endpoint(None) is None + assert JobsV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert JobsV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert JobsV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert JobsV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert JobsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (JobsV1Beta3Client, "grpc"), + (JobsV1Beta3AsyncClient, "grpc_asyncio"), + (JobsV1Beta3Client, "rest"), +]) +def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.JobsV1Beta3GrpcTransport, "grpc"), + (transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.JobsV1Beta3RestTransport, "rest"), +]) +def test_jobs_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (JobsV1Beta3Client, "grpc"), + (JobsV1Beta3AsyncClient, "grpc_asyncio"), + (JobsV1Beta3Client, "rest"), +]) +def test_jobs_v1_beta3_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if 
transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +def test_jobs_v1_beta3_client_get_transport_class(): + transport = JobsV1Beta3Client.get_transport_class() + available_transports = [ + transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3RestTransport, + ] + assert transport in available_transports + + transport = JobsV1Beta3Client.get_transport_class("grpc") + assert transport == transports.JobsV1Beta3GrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc"), + (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), +]) +@mock.patch.object(JobsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3Client)) +@mock.patch.object(JobsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3AsyncClient)) +def test_jobs_v1_beta3_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(JobsV1Beta3Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(JobsV1Beta3Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", "true"), + (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", "false"), + (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "true"), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "false"), +]) +@mock.patch.object(JobsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3Client)) +@mock.patch.object(JobsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_jobs_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + JobsV1Beta3Client, JobsV1Beta3AsyncClient +]) +@mock.patch.object(JobsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3Client)) +@mock.patch.object(JobsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3AsyncClient)) +def test_jobs_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc"), + (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), +]) +def test_jobs_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", grpc_helpers), + (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", None), +]) +def test_jobs_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_jobs_v1_beta3_client_client_options_from_dict(): + with mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = JobsV1Beta3Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", grpc_helpers), + (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_jobs_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
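+    # google.auth.default (ADC) and load_credentials_from_file are patched with
+    # distinct credential objects so the assertion can tell them apart: the
+    # channel must be built with the file credentials, not the ADC ones.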
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=None, + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + jobs.CreateJobRequest, + dict, +]) +def test_create_job(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + ) + response = client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +def test_create_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + client.create_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CreateJobRequest() + +@pytest.mark.asyncio +async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=jobs.CreateJobRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + )) + response = await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_create_job_async_from_dict(): + await test_create_job_async(request_type=dict) + + +def test_create_job_field_headers(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CreateJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
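+    # The routing fields set on the request above should reappear verbatim in the
+    # x-goog-request-params metadata entry ("project_id=...&location=...").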
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = jobs.Job() + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_field_headers_async(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.CreateJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + jobs.GetJobRequest, + dict, +]) +def test_get_job(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.GetJobRequest() + + # Establish that the response is the type that we expect. 
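+    # Scalar, enum, repeated, and bool fields are all asserted so a proto round
+    # trip through the mocked stub cannot silently drop a value.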
+ assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.GetJobRequest() + +@pytest.mark.asyncio +async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=jobs.GetJobRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + )) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.GetJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + + +def test_get_job_field_headers(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = jobs.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.GetJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + jobs.UpdateJobRequest, + dict, +]) +def test_update_job(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + ) + response = client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.UpdateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +def test_update_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job), + '__call__') as call: + client.update_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.UpdateJobRequest() + +@pytest.mark.asyncio +async def test_update_job_async(transport: str = 'grpc_asyncio', request_type=jobs.UpdateJobRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
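+        # FakeUnaryUnaryCall wraps the canned jobs.Job in an awaitable, standing in
+        # for the call object a real async unary-unary stub would hand back.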
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + )) + response = await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.UpdateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_update_job_async_from_dict(): + await test_update_job_async(request_type=dict) + + +def test_update_job_field_headers(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job), + '__call__') as call: + call.return_value = jobs.Job() + client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_job_field_headers_async(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.UpdateJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.update_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + jobs.ListJobsRequest, + dict, +]) +def test_list_jobs(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.ListJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + +@pytest.mark.asyncio +async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.ListJobsRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + # Establish that the response is the type that we expect. 
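+    # list_jobs returns a pager rather than the raw ListJobsResponse; attributes
+    # such as next_page_token are read through to the underlying response.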
+ assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) + + +def test_list_jobs_field_headers(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = jobs.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse()) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +def test_list_jobs_pager(transport_name: str = "grpc"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('project_id', ''), + ('location', ''), + )), + ) + pager = client.list_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) + for i in results) +def test_list_jobs_pages(transport_name: str = "grpc"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
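+    # Four pages are queued (3, 0, 1, and 2 jobs); the trailing RuntimeError makes
+    # the test fail loudly if the pager ever fetches past the final page.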
+ with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, jobs.Job) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + jobs.ListJobsRequest, + dict, +]) +def test_aggregated_list_jobs(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
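+        # AggregatedListJobs reuses the ListJobs request/response messages, which
+        # is why the assertions below expect jobs.ListJobsRequest().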
+ call.return_value = jobs.ListJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.aggregated_list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_aggregated_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + client.aggregated_list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + +@pytest.mark.asyncio +async def test_aggregated_list_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.ListJobsRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.aggregated_list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AggregatedListJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_aggregated_list_jobs_async_from_dict(): + await test_aggregated_list_jobs_async(request_type=dict) + + +def test_aggregated_list_jobs_field_headers(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = 'project_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + call.return_value = jobs.ListJobsResponse() + client.aggregated_list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
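+    # Unlike the per-location Jobs methods above, aggregated listing routes on
+    # project_id alone, so that is the only routing field expected here.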
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_aggregated_list_jobs_field_headers_async(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.ListJobsRequest() + + request.project_id = 'project_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse()) + await client.aggregated_list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value', + ) in kw['metadata'] + + +def test_aggregated_list_jobs_pager(transport_name: str = "grpc"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('project_id', ''), + )), + ) + pager = client.aggregated_list_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, jobs.Job) + for i in results) +def test_aggregated_list_jobs_pages(transport_name: str = "grpc"): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.aggregated_list_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_aggregated_list_jobs_async_pager(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
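+    # new_callable=mock.AsyncMock is what makes the mocked stub awaitable; each
+    # await then pops the next queued page from side_effect.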
+ with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.aggregated_list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, jobs.Job) + for i in responses) + + +@pytest.mark.asyncio +async def test_aggregated_list_jobs_async_pages(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.aggregated_list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + jobs.Job(), + ], + next_page_token='abc', + ), + jobs.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + ], + next_page_token='ghi', + ), + jobs.ListJobsResponse( + jobs=[ + jobs.Job(), + jobs.Job(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.aggregated_list_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + jobs.CheckActiveJobsRequest, + dict, +]) +def test_check_active_jobs(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_active_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.CheckActiveJobsResponse( + active_jobs_exist=True, + ) + response = client.check_active_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CheckActiveJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.CheckActiveJobsResponse) + assert response.active_jobs_exist is True + + +def test_check_active_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
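+    # Even with no arguments, the client should synthesize a default
+    # CheckActiveJobsRequest rather than passing None to the stub.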
+ with mock.patch.object( + type(client.transport.check_active_jobs), + '__call__') as call: + client.check_active_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CheckActiveJobsRequest() + +@pytest.mark.asyncio +async def test_check_active_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.CheckActiveJobsRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.check_active_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.CheckActiveJobsResponse( + active_jobs_exist=True, + )) + response = await client.check_active_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.CheckActiveJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, jobs.CheckActiveJobsResponse) + assert response.active_jobs_exist is True + + +@pytest.mark.asyncio +async def test_check_active_jobs_async_from_dict(): + await test_check_active_jobs_async(request_type=dict) + + +@pytest.mark.parametrize("request_type", [ + jobs.SnapshotJobRequest, + dict, +]) +def test_snapshot_job(request_type, transport: str = 'grpc'): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.snapshot_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = snapshots.Snapshot( + id='id_value', + project_id='project_id_value', + source_job_id='source_job_id_value', + state=snapshots.SnapshotState.PENDING, + description='description_value', + disk_size_bytes=1611, + region='region_value', + ) + response = client.snapshot_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SnapshotJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.Snapshot) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.source_job_id == 'source_job_id_value' + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == 'description_value' + assert response.disk_size_bytes == 1611 + assert response.region == 'region_value' + + +def test_snapshot_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.snapshot_job), + '__call__') as call: + client.snapshot_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SnapshotJobRequest() + +@pytest.mark.asyncio +async def test_snapshot_job_async(transport: str = 'grpc_asyncio', request_type=jobs.SnapshotJobRequest): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.snapshot_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot( + id='id_value', + project_id='project_id_value', + source_job_id='source_job_id_value', + state=snapshots.SnapshotState.PENDING, + description='description_value', + disk_size_bytes=1611, + region='region_value', + )) + response = await client.snapshot_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == jobs.SnapshotJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.Snapshot) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.source_job_id == 'source_job_id_value' + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == 'description_value' + assert response.disk_size_bytes == 1611 + assert response.region == 'region_value' + + +@pytest.mark.asyncio +async def test_snapshot_job_async_from_dict(): + await test_snapshot_job_async(request_type=dict) + + +def test_snapshot_job_field_headers(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SnapshotJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.snapshot_job), + '__call__') as call: + call.return_value = snapshots.Snapshot() + client.snapshot_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_snapshot_job_field_headers_async(): + client = JobsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = jobs.SnapshotJobRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
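+    # All three routing fields (project_id, location, job_id) should be folded
+    # into a single x-goog-request-params metadata value.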
+ with mock.patch.object( + type(client.transport.snapshot_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot()) + await client.snapshot_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + jobs.CreateJobRequest, + dict, +]) +def test_create_job_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2'} + request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 'user_agent': {'fields': {}}, 'version': 
{}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 'pipeline_description': {'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
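+        # The REST tests fake the HTTP session rather than a gRPC stub: the expected
+        # proto is serialized with json_format.MessageToJson, wrapped in a 200
+        # Response, and the transport parses that body back into a jobs.Job.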
+        return_value = jobs.Job(
+            id='id_value',
+            project_id='project_id_value',
+            name='name_value',
+            type_=environment.JobType.JOB_TYPE_BATCH,
+            steps_location='steps_location_value',
+            current_state=jobs.JobState.JOB_STATE_STOPPED,
+            requested_state=jobs.JobState.JOB_STATE_STOPPED,
+            replace_job_id='replace_job_id_value',
+            client_request_id='client_request_id_value',
+            replaced_by_job_id='replaced_by_job_id_value',
+            temp_files=['temp_files_value'],
+            location='location_value',
+            created_from_snapshot_id='created_from_snapshot_id_value',
+            satisfies_pzs=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.Job.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.create_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, jobs.Job)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.name == 'name_value'
+    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
+    assert response.steps_location == 'steps_location_value'
+    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.replace_job_id == 'replace_job_id_value'
+    assert response.client_request_id == 'client_request_id_value'
+    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
+    assert response.temp_files == ['temp_files_value']
+    assert response.location == 'location_value'
+    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
+    assert response.satisfies_pzs is True
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_job_rest_interceptors(null_interceptor):
+    transport = transports.JobsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
+    )
+    client = JobsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_create_job") as post, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_create_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.CreateJobRequest.pb(jobs.CreateJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.Job.to_json(jobs.Job())
+
+        request = jobs.CreateJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.Job()
+
+        client.create_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_job_rest_bad_request(transport: str = 'rest', request_type=jobs.CreateJobRequest):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1',
'location': 'sample2'} + request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 'user_agent': {'fields': {}}, 'version': {}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 'pipeline_description': 
{'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job(request) + + +def test_create_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + jobs.GetJobRequest, + dict, +]) +def test_get_job_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
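+        # GetJobRequest carries no job payload; the request_init identifiers
+        # above (project_id, location, job_id) are what gets transcoded into
+        # the request URI, so a bare dict is enough to satisfy transcoding.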
+        return_value = jobs.Job(
+            id='id_value',
+            project_id='project_id_value',
+            name='name_value',
+            type_=environment.JobType.JOB_TYPE_BATCH,
+            steps_location='steps_location_value',
+            current_state=jobs.JobState.JOB_STATE_STOPPED,
+            requested_state=jobs.JobState.JOB_STATE_STOPPED,
+            replace_job_id='replace_job_id_value',
+            client_request_id='client_request_id_value',
+            replaced_by_job_id='replaced_by_job_id_value',
+            temp_files=['temp_files_value'],
+            location='location_value',
+            created_from_snapshot_id='created_from_snapshot_id_value',
+            satisfies_pzs=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.Job.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, jobs.Job)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.name == 'name_value'
+    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
+    assert response.steps_location == 'steps_location_value'
+    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.replace_job_id == 'replace_job_id_value'
+    assert response.client_request_id == 'client_request_id_value'
+    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
+    assert response.temp_files == ['temp_files_value']
+    assert response.location == 'location_value'
+    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
+    assert response.satisfies_pzs is True
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_job_rest_interceptors(null_interceptor):
+    transport = transports.JobsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
+    )
+    client = JobsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_get_job") as post, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_get_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.Job.to_json(jobs.Job())
+
+        request = jobs.GetJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.Job()
+
+        client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_job_rest_bad_request(transport: str = 'rest', request_type=jobs.GetJobRequest):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2',
'job_id': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_job(request) + + +def test_get_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + jobs.UpdateJobRequest, + dict, +]) +def test_update_job_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} + request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 
'user_agent': {'fields': {}}, 'version': {}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 'pipeline_description': {'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
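+        # Note the trailing underscore in 'type_' above: proto-plus renames
+        # proto fields that collide with Python builtins/keywords, so the
+        # wire field `type` surfaces as `type_` on the message and in dicts.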
+        return_value = jobs.Job(
+            id='id_value',
+            project_id='project_id_value',
+            name='name_value',
+            type_=environment.JobType.JOB_TYPE_BATCH,
+            steps_location='steps_location_value',
+            current_state=jobs.JobState.JOB_STATE_STOPPED,
+            requested_state=jobs.JobState.JOB_STATE_STOPPED,
+            replace_job_id='replace_job_id_value',
+            client_request_id='client_request_id_value',
+            replaced_by_job_id='replaced_by_job_id_value',
+            temp_files=['temp_files_value'],
+            location='location_value',
+            created_from_snapshot_id='created_from_snapshot_id_value',
+            satisfies_pzs=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.Job.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.update_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, jobs.Job)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.name == 'name_value'
+    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
+    assert response.steps_location == 'steps_location_value'
+    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.replace_job_id == 'replace_job_id_value'
+    assert response.client_request_id == 'client_request_id_value'
+    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
+    assert response.temp_files == ['temp_files_value']
+    assert response.location == 'location_value'
+    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
+    assert response.satisfies_pzs is True
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_job_rest_interceptors(null_interceptor):
+    transport = transports.JobsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
+    )
+    client = JobsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_update_job") as post, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_update_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.Job.to_json(jobs.Job())
+
+        request = jobs.UpdateJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.Job()
+
+        client.update_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_job_rest_bad_request(transport: str = 'rest', request_type=jobs.UpdateJobRequest):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1',
'location': 'sample2', 'job_id': 'sample3'} + request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 'user_agent': {'fields': {}}, 'version': {}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 
'pipeline_description': {'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_job(request) + + +def test_update_job_rest_error(): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + jobs.ListJobsRequest, + dict, +]) +def test_list_jobs_rest(request_type): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
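+        # Only next_page_token is populated here: these assertions check the
+        # pager wrapper type rather than page contents. Full page traversal
+        # is exercised separately in test_list_jobs_rest_pager below.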
+        return_value = jobs.ListJobsResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.ListJobsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list_jobs(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_jobs_rest_interceptors(null_interceptor):
+    transport = transports.JobsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
+    )
+    client = JobsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_list_jobs") as post, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_list_jobs") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.ListJobsResponse.to_json(jobs.ListJobsResponse())
+
+        request = jobs.ListJobsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.ListJobsResponse()
+
+        client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_jobs_rest_bad_request(transport: str = 'rest', request_type=jobs.ListJobsRequest):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_jobs(request)
+
+
+def test_list_jobs_rest_pager(transport: str = 'rest'):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            jobs.ListJobsResponse(
+                jobs=[
+                    jobs.Job(),
+                    jobs.Job(),
+                    jobs.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            jobs.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            jobs.ListJobsResponse(
+                jobs=[
+                    jobs.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            jobs.ListJobsResponse(
+                jobs=[
+                    jobs.Job(),
+                    jobs.Job(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(jobs.ListJobsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'project_id': 'sample1', 'location': 'sample2'}
+
+        pager = client.list_jobs(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, jobs.Job)
+                   for i in results)
+
+        pages = list(client.list_jobs(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [
+    jobs.ListJobsRequest,
+    dict,
+])
+def test_aggregated_list_jobs_rest(request_type):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = jobs.ListJobsResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.ListJobsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.aggregated_list_jobs(request)
+
+    # Establish that the response is the type that we expect.
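+    # AggregatedListJobsPager mirrors ListJobsPager; the aggregated variant
+    # differs only in scope (jobs across all of the project's locations,
+    # which is why request_init above needs no 'location' key).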
+    assert isinstance(response, pagers.AggregatedListJobsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_aggregated_list_jobs_rest_interceptors(null_interceptor):
+    transport = transports.JobsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
+    )
+    client = JobsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_aggregated_list_jobs") as post, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_aggregated_list_jobs") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.ListJobsResponse.to_json(jobs.ListJobsResponse())
+
+        request = jobs.ListJobsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.ListJobsResponse()
+
+        client.aggregated_list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_aggregated_list_jobs_rest_bad_request(transport: str = 'rest', request_type=jobs.ListJobsRequest):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.aggregated_list_jobs(request)
+
+
+def test_aggregated_list_jobs_rest_pager(transport: str = 'rest'):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # Set the response as a series of pages
+        response = (
+            jobs.ListJobsResponse(
+                jobs=[
+                    jobs.Job(),
+                    jobs.Job(),
+                    jobs.Job(),
+                ],
+                next_page_token='abc',
+            ),
+            jobs.ListJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            jobs.ListJobsResponse(
+                jobs=[
+                    jobs.Job(),
+                ],
+                next_page_token='ghi',
+            ),
+            jobs.ListJobsResponse(
+                jobs=[
+                    jobs.Job(),
+                    jobs.Job(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(jobs.ListJobsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'project_id': 'sample1'}
+
+        pager = client.aggregated_list_jobs(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, jobs.Job)
+                   for i in results)
+
+        pages = list(client.aggregated_list_jobs(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_check_active_jobs_rest_no_http_options():
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = jobs.CheckActiveJobsRequest()
+    with pytest.raises(RuntimeError):
+        client.check_active_jobs(request)
+
+
+@pytest.mark.parametrize("request_type", [
+    jobs.SnapshotJobRequest,
+    dict,
+])
+def test_snapshot_job_rest(request_type):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = snapshots.Snapshot(
+            id='id_value',
+            project_id='project_id_value',
+            source_job_id='source_job_id_value',
+            state=snapshots.SnapshotState.PENDING,
+            description='description_value',
+            disk_size_bytes=1611,
+            region='region_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = snapshots.Snapshot.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.snapshot_job(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, snapshots.Snapshot)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.source_job_id == 'source_job_id_value'
+    assert response.state == snapshots.SnapshotState.PENDING
+    assert response.description == 'description_value'
+    assert response.disk_size_bytes == 1611
+    assert response.region == 'region_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_snapshot_job_rest_interceptors(null_interceptor):
+    transport = transports.JobsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
+    )
+    client = JobsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+        mock.patch.object(path_template, "transcode") as transcode, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_snapshot_job") as post, \
+        mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_snapshot_job") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = jobs.SnapshotJobRequest.pb(jobs.SnapshotJobRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot())
+
+        request = jobs.SnapshotJobRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = snapshots.Snapshot()
+
+        client.snapshot_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_snapshot_job_rest_bad_request(transport: str = 'rest', request_type=jobs.SnapshotJobRequest):
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.snapshot_job(request)
+
+
+def test_snapshot_job_rest_error():
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+def test_check_active_jobs_rest_error():
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+    # Since a `google.api.http` annotation is required for using a rest transport
+    # method, this should error.
+    with pytest.raises(RuntimeError) as runtime_error:
+        client.check_active_jobs({})
+    assert ("Cannot define a method without a valid 'google.api.http' annotation."
+            in str(runtime_error.value))
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
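+    # (a transport instance already carries its own credentials, scopes, and
+    # session, so a second credential source on the client would be ambiguous)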
+ transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = JobsV1Beta3Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = JobsV1Beta3Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.JobsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.JobsV1Beta3GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.JobsV1Beta3GrpcTransport, + transports.JobsV1Beta3GrpcAsyncIOTransport, + transports.JobsV1Beta3RestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = JobsV1Beta3Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
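+    # (other transports are opt-in via the transport argument, as the
+    # parametrized transport_kind tests above demonstrate)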
+    client = JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.JobsV1Beta3GrpcTransport,
+    )
+
+def test_jobs_v1_beta3_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.JobsV1Beta3Transport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_jobs_v1_beta3_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.JobsV1Beta3Transport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'create_job',
+        'get_job',
+        'update_job',
+        'list_jobs',
+        'aggregated_list_jobs',
+        'check_active_jobs',
+        'snapshot_job',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_jobs_v1_beta3_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.JobsV1Beta3Transport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_jobs_v1_beta3_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.JobsV1Beta3Transport()
+        adc.assert_called_once()
+
+
+def test_jobs_v1_beta3_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
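+    # ADC here means google.auth.default(), which yields a (credentials,
+    # project_id) tuple, which is why the fake below returns
+    # (AnonymousCredentials(), None).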
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        JobsV1Beta3Client()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobsV1Beta3GrpcTransport,
+        transports.JobsV1Beta3GrpcAsyncIOTransport,
+    ],
+)
+def test_jobs_v1_beta3_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.JobsV1Beta3GrpcTransport,
+        transports.JobsV1Beta3GrpcAsyncIOTransport,
+        transports.JobsV1Beta3RestTransport,
+    ],
+)
+def test_jobs_v1_beta3_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(e)
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.JobsV1Beta3GrpcTransport, grpc_helpers),
+        (transports.JobsV1Beta3GrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_jobs_v1_beta3_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
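+    # The expected call below also pins the channel defaults: the public
+    # dataflow.googleapis.com:443 endpoint, and -1 (unlimited) gRPC
+    # send/receive message sizes.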
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=["1", "2"], + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport]) +def test_jobs_v1_beta3_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_jobs_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.JobsV1Beta3RestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_jobs_v1_beta3_host_no_port(transport_name): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_jobs_v1_beta3_host_with_port(transport_name): + client = JobsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_jobs_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = JobsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = JobsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_job._session + session2 = client2.transport.create_job._session + assert session1 != session2 + session1 = client1.transport.get_job._session + session2 = client2.transport.get_job._session + assert session1 != session2 + session1 = client1.transport.update_job._session + session2 = client2.transport.update_job._session + assert session1 != session2 + session1 = client1.transport.list_jobs._session + session2 = client2.transport.list_jobs._session + assert session1 != session2 + session1 = client1.transport.aggregated_list_jobs._session + session2 = client2.transport.aggregated_list_jobs._session + assert session1 != session2 + session1 = client1.transport.check_active_jobs._session + session2 = client2.transport.check_active_jobs._session + assert session1 != session2 + session1 = client1.transport.snapshot_job._session + session2 = client2.transport.snapshot_job._session + assert session1 != session2 +def test_jobs_v1_beta3_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
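+    # An explicitly injected channel is adopted as-is: the transport still
+    # normalizes host to host:443, but records no ssl_channel_credentials,
+    # as the assertions below verify.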
+    transport = transports.JobsV1Beta3GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_jobs_v1_beta3_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.JobsV1Beta3GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport])
+def test_jobs_v1_beta3_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
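+
+
+# The deprecated-argument tests in this block exercise api_mtls_endpoint and
+# client_cert_source on the transport constructor. For reference, a minimal
+# sketch of the supported way to enable mTLS through client_options (the same
+# mechanism the env-var tests above exercise); illustrative only, never
+# invoked by the test suite:
+def _example_jobs_client_with_mtls():  # pragma: NO COVER
+    options = client_options.ClientOptions(
+        client_cert_source=client_cert_source_callback,
+    )
+    return JobsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=options,
+    )
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.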
+@pytest.mark.parametrize("transport_class", [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport])
+def test_jobs_v1_beta3_transport_channel_mtls_with_adc(
+    transport_class
+):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_common_billing_account_path():
+    billing_account = "squid"
+    expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+    actual = JobsV1Beta3Client.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = JobsV1Beta3Client.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobsV1Beta3Client.parse_common_billing_account_path(path)
+    assert expected == actual
+
+def test_common_folder_path():
+    folder = "whelk"
+    expected = "folders/{folder}".format(folder=folder, )
+    actual = JobsV1Beta3Client.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = JobsV1Beta3Client.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobsV1Beta3Client.parse_common_folder_path(path)
+    assert expected == actual
+
+def test_common_organization_path():
+    organization = "oyster"
+    expected = "organizations/{organization}".format(organization=organization, )
+    actual = JobsV1Beta3Client.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = JobsV1Beta3Client.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobsV1Beta3Client.parse_common_organization_path(path)
+    assert expected == actual
+
+def test_common_project_path():
+    project = "cuttlefish"
+    expected = "projects/{project}".format(project=project, )
+    actual = JobsV1Beta3Client.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = JobsV1Beta3Client.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobsV1Beta3Client.parse_common_project_path(path)
+    assert expected == actual
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(project=project, location=location, )
+    actual = JobsV1Beta3Client.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = JobsV1Beta3Client.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = JobsV1Beta3Client.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(transports.JobsV1Beta3Transport, '_prep_wrapped_messages') as prep:
+        client = JobsV1Beta3Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(transports.JobsV1Beta3Transport, '_prep_wrapped_messages') as prep:
+        transport_class = JobsV1Beta3Client.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+    client = JobsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close():
+    transports = {
+        "rest": "_session",
+        "grpc": "_grpc_channel",
+    }
+
+    for transport, close_name in transports.items():
+        client = JobsV1Beta3Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
+            with client:
+                close.assert_not_called()
+            close.assert_called_once()
+
+def test_client_ctx():
+    transports = [
+        'rest',
+        'grpc',
+    ]
+    for transport in transports:
+        client = JobsV1Beta3Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport
+        )
+        # Test client calls underlying transport.
+        with mock.patch.object(type(client.transport), "close") as close:
+            close.assert_not_called()
+            with client:
+                pass
+            close.assert_called()
+
+@pytest.mark.parametrize("client_class,transport_class", [
+    (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport),
+    (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport),
+])
+def test_api_key_credentials(client_class, transport_class):
+    with mock.patch.object(
+        google.auth._default, "get_api_key_credentials", create=True
+    ) as get_api_key_credentials:
+        mock_cred = mock.Mock()
+        get_api_key_credentials.return_value = mock_cred
+        options = client_options.ClientOptions()
+        options.api_key = "api_key"
+        with mock.patch.object(transport_class, "__init__") as patched:
+            patched.return_value = None
+            client = client_class(client_options=options)
+            patched.assert_called_once_with(
+                credentials=mock_cred,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py
new file mode 100644
index 0000000..a03218e
--- /dev/null
+++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py
@@ -0,0 +1,1713 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3AsyncClient
+from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3Client
+from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers
+from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import transports
+from google.cloud.dataflow_v1beta3.types import messages
+from google.oauth2 import service_account
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
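+
+
+# In production code, a certificate source typically reads real PEM material
+# rather than returning fixed bytes. A minimal sketch of such a callback (the
+# file paths are hypothetical and this helper is never used by the tests):
+def _example_cert_source():  # pragma: NO COVER
+    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
+        return cert.read(), key.read()
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.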
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert MessagesV1Beta3Client._get_default_mtls_endpoint(None) is None
+    assert MessagesV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert MessagesV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert MessagesV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert MessagesV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert MessagesV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (MessagesV1Beta3Client, "grpc"),
+    (MessagesV1Beta3AsyncClient, "grpc_asyncio"),
+    (MessagesV1Beta3Client, "rest"),
+])
+def test_messages_v1_beta3_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'dataflow.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://dataflow.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.MessagesV1Beta3GrpcTransport, "grpc"),
+    (transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.MessagesV1Beta3RestTransport, "rest"),
+])
+def test_messages_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (MessagesV1Beta3Client, "grpc"),
+    (MessagesV1Beta3AsyncClient, "grpc_asyncio"),
+    (MessagesV1Beta3Client, "rest"),
+])
+def test_messages_v1_beta3_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'dataflow.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://dataflow.googleapis.com'
+        )
+
+
+def test_messages_v1_beta3_client_get_transport_class():
+    transport = MessagesV1Beta3Client.get_transport_class()
+    available_transports = [
+        transports.MessagesV1Beta3GrpcTransport,
+        transports.MessagesV1Beta3RestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = MessagesV1Beta3Client.get_transport_class("grpc")
+    assert transport == transports.MessagesV1Beta3GrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc"),
+    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"),
+])
+@mock.patch.object(MessagesV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3Client))
+@mock.patch.object(MessagesV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3AsyncClient))
+def test_messages_v1_beta3_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(MessagesV1Beta3Client, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(MessagesV1Beta3Client, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", "true"),
+    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", "false"),
+    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", "true"),
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", "false"),
+])
+@mock.patch.object(MessagesV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3Client))
+@mock.patch.object(MessagesV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3AsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_messages_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(client_options=options, transport=transport_name)
+
+            if use_client_cert_env == "false":
+                expected_client_cert_source = None
+                expected_host = client.DEFAULT_ENDPOINT
+            else:
+                expected_client_cert_source = client_cert_source_callback
+                expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=expected_host,
+                scopes=None,
+                client_cert_source_for_mtls=expected_client_cert_source,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
+                    if use_client_cert_env == "false":
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_client_cert_source = None
+                    else:
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_client_cert_source = client_cert_source_callback
+
+                    patched.return_value = None
+                    client = client_class(transport=transport_name)
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        client_cert_source_for_mtls=expected_client_cert_source,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        always_use_jwt_access=True,
+                        api_audience=None,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
+                patched.return_value = None
+                client = client_class(transport=transport_name)
+                patched.assert_called_once_with(
+                    credentials=None,
+                    credentials_file=None,
+                    host=client.DEFAULT_ENDPOINT,
+                    scopes=None,
+                    client_cert_source_for_mtls=None,
+                    quota_project_id=None,
+                    client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    always_use_jwt_access=True,
+                    api_audience=None,
+                )
+
+
+@pytest.mark.parametrize("client_class", [
+    MessagesV1Beta3Client, MessagesV1Beta3AsyncClient
+])
+@mock.patch.object(MessagesV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3Client))
+@mock.patch.object(MessagesV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3AsyncClient))
+def test_messages_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class):
+    mock_client_cert_source = mock.Mock()
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
+            with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source):
+                api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+                assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+                assert cert_source == mock_client_cert_source
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc"),
+    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"),
+])
+def test_messages_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", grpc_helpers),
+    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", None),
+])
+def test_messages_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+def test_messages_v1_beta3_client_client_options_from_dict():
+    with mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3GrpcTransport.__init__') as grpc_transport:
+        grpc_transport.return_value = None
+        client = MessagesV1Beta3Client(
+            client_options={'api_endpoint': 'squid.clam.whelk'}
+        )
+        grpc_transport.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [
+    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", grpc_helpers),
+    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async),
+])
+def test_messages_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers):
+    # Check the case credentials file is provided.
+    options = client_options.ClientOptions(
+        credentials_file="credentials.json"
+    )
+
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file="credentials.json",
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # test that the credentials from file are saved and used as the credentials.
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel"
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        file_creds = ga_credentials.AnonymousCredentials()
+        load_creds.return_value = (file_creds, None)
+        adc.return_value = (creds, None)
+        client = client_class(client_options=options, transport=transport_name)
+        create_channel.assert_called_with(
+            "dataflow.googleapis.com:443",
+            credentials=file_creds,
+            credentials_file=None,
+            quota_project_id=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            scopes=None,
+            default_host="dataflow.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("request_type", [
+    messages.ListJobMessagesRequest,
+    dict,
+])
+def test_list_job_messages(request_type, transport: str = 'grpc'):
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = messages.ListJobMessagesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_job_messages(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == messages.ListJobMessagesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobMessagesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_job_messages_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        client.list_job_messages()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == messages.ListJobMessagesRequest()
+
+@pytest.mark.asyncio
+async def test_list_job_messages_async(transport: str = 'grpc_asyncio', request_type=messages.ListJobMessagesRequest):
+    client = MessagesV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(messages.ListJobMessagesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_job_messages(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == messages.ListJobMessagesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobMessagesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_job_messages_async_from_dict():
+    await test_list_job_messages_async(request_type=dict)
+
+
+def test_list_job_messages_field_headers():
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = messages.ListJobMessagesRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+    request.job_id = 'job_id_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        call.return_value = messages.ListJobMessagesResponse()
+        client.list_job_messages(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value&job_id=job_id_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_job_messages_field_headers_async():
+    client = MessagesV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = messages.ListJobMessagesRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+    request.job_id = 'job_id_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(messages.ListJobMessagesResponse())
+        await client.list_job_messages(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value&job_id=job_id_value',
+    ) in kw['metadata']
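+
+
+# For reference, a real caller simply iterates the pager returned by
+# list_job_messages; the pager fetches further pages transparently. This is a
+# minimal sketch (the project/location/job ids are placeholders) and is never
+# invoked by the test suite:
+def _example_iterate_job_messages():  # pragma: NO COVER
+    client = MessagesV1Beta3Client(credentials=ga_credentials.AnonymousCredentials())
+    request = messages.ListJobMessagesRequest(
+        project_id="my-project",
+        location="us-central1",
+        job_id="my-job-id",
+    )
+    for job_message in client.list_job_messages(request=request):
+        print(job_message)
+
+
+def test_list_job_messages_pager(transport_name: str = "grpc"):
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        # Set the response to a series of pages.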
+        call.side_effect = (
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+                next_page_token='abc',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[],
+                next_page_token='def',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                ],
+                next_page_token='ghi',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('project_id', ''),
+                ('location', ''),
+                ('job_id', ''),
+            )),
+        )
+        pager = client.list_job_messages(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, messages.JobMessage)
+                   for i in results)
+
+
+def test_list_job_messages_pages(transport_name: str = "grpc"):
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+                next_page_token='abc',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[],
+                next_page_token='def',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                ],
+                next_page_token='ghi',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_job_messages(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_job_messages_async_pager():
+    client = MessagesV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+                next_page_token='abc',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[],
+                next_page_token='def',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                ],
+                next_page_token='ghi',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_job_messages(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, messages.JobMessage)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_job_messages_async_pages():
+    client = MessagesV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_messages),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+                next_page_token='abc',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[],
+                next_page_token='def',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                ],
+                next_page_token='ghi',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_job_messages(request={})).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [
+    messages.ListJobMessagesRequest,
+    dict,
+])
+def test_list_job_messages_rest(request_type):
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = messages.ListJobMessagesResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = messages.ListJobMessagesResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list_job_messages(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListJobMessagesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_job_messages_rest_interceptors(null_interceptor):
+    transport = transports.MessagesV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.MessagesV1Beta3RestInterceptor(),
+    )
+    client = MessagesV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.MessagesV1Beta3RestInterceptor, "post_list_job_messages") as post, \
+         mock.patch.object(transports.MessagesV1Beta3RestInterceptor, "pre_list_job_messages") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = messages.ListJobMessagesRequest.pb(messages.ListJobMessagesRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = messages.ListJobMessagesResponse.to_json(messages.ListJobMessagesResponse())
+
+        request = messages.ListJobMessagesRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = messages.ListJobMessagesResponse()
+
+        client.list_job_messages(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_job_messages_rest_bad_request(transport: str = 'rest', request_type=messages.ListJobMessagesRequest):
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_job_messages(request)
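+
+
+# The interceptor test above drives the pre/post hooks directly. For
+# reference, a user-supplied interceptor would subclass the generated
+# MessagesV1Beta3RestInterceptor and be handed to the REST transport; a
+# minimal sketch under that assumption (never invoked by the test suite):
+class _ExampleInterceptor(transports.MessagesV1Beta3RestInterceptor):  # pragma: NO COVER
+    def pre_list_job_messages(self, request, metadata):
+        # Inspect or amend the request and metadata before the HTTP call.
+        return request, metadata
+
+    def post_list_job_messages(self, response):
+        # Inspect or amend the response before it is returned to the caller.
+        return response
+
+
+def test_list_job_messages_rest_pager(transport: str = 'rest'):
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.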
+        #with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+                next_page_token='abc',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[],
+                next_page_token='def',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                ],
+                next_page_token='ghi',
+            ),
+            messages.ListJobMessagesResponse(
+                job_messages=[
+                    messages.JobMessage(),
+                    messages.JobMessage(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(messages.ListJobMessagesResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode('UTF-8')
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+
+        pager = client.list_job_messages(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, messages.JobMessage)
+                   for i in results)
+
+        pages = list(client.list_job_messages(request=sample_request).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MessagesV1Beta3Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MessagesV1Beta3Client(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = MessagesV1Beta3Client(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = MessagesV1Beta3Client(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = MessagesV1Beta3Client(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = MessagesV1Beta3Client(transport=transport)
+    assert client.transport is transport
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.MessagesV1Beta3GrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+@pytest.mark.parametrize("transport_class", [
+    transports.MessagesV1Beta3GrpcTransport,
+    transports.MessagesV1Beta3GrpcAsyncIOTransport,
+    transports.MessagesV1Beta3RestTransport,
+])
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, 'default') as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "rest",
+])
+def test_transport_kind(transport_name):
+    transport = MessagesV1Beta3Client.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+def test_transport_grpc_default():
+    # A client should use the gRPC transport by default.
+    client = MessagesV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.MessagesV1Beta3GrpcTransport,
+    )
+
+def test_messages_v1_beta3_base_transport_error():
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+        transport = transports.MessagesV1Beta3Transport(
+            credentials=ga_credentials.AnonymousCredentials(),
+            credentials_file="credentials.json"
+        )
+
+
+def test_messages_v1_beta3_base_transport():
+    # Instantiate the base transport.
+    with mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport.__init__') as Transport:
+        Transport.return_value = None
+        transport = transports.MessagesV1Beta3Transport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        'list_job_messages',
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+    with pytest.raises(NotImplementedError):
+        transport.close()
+
+    # Catch all for all remaining methods and properties
+    remainder = [
+        'kind',
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+
+
+def test_messages_v1_beta3_base_transport_with_credentials_file():
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.MessagesV1Beta3Transport(
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with("credentials.json",
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+def test_messages_v1_beta3_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages') as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.MessagesV1Beta3Transport()
+        adc.assert_called_once()
+
+
+def test_messages_v1_beta3_auth_adc():
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        MessagesV1Beta3Client()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MessagesV1Beta3GrpcTransport,
+        transports.MessagesV1Beta3GrpcAsyncIOTransport,
+    ],
+)
+def test_messages_v1_beta3_transport_auth_adc(transport_class):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.MessagesV1Beta3GrpcTransport,
+        transports.MessagesV1Beta3GrpcAsyncIOTransport,
+        transports.MessagesV1Beta3RestTransport,
+    ],
+)
+def test_messages_v1_beta3_transport_auth_gdch_credentials(transport_class):
+    host = 'https://language.com'
+    api_audience_tests = [None, 'https://language2.com']
+    api_audience_expect = [host, 'https://language2.com']
+    for t, e in zip(api_audience_tests, api_audience_expect):
+        with mock.patch.object(google.auth, 'default', autospec=True) as adc:
+            gdch_mock = mock.MagicMock()
+            type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock)
+            adc.return_value = (gdch_mock, None)
+            transport_class(host=host, api_audience=t)
+            gdch_mock.with_gdch_audience.assert_called_once_with(
+                e
+            )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.MessagesV1Beta3GrpcTransport, grpc_helpers),
+        (transports.MessagesV1Beta3GrpcAsyncIOTransport, grpc_helpers_async)
+    ],
+)
+def test_messages_v1_beta3_transport_create_channel(transport_class, grpc_helpers):
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(
+            quota_project_id="octopus",
+            scopes=["1", "2"]
+        )
+
+        create_channel.assert_called_with(
+            "dataflow.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=(
+                'https://www.googleapis.com/auth/cloud-platform',
+                'https://www.googleapis.com/auth/compute',
+                'https://www.googleapis.com/auth/compute.readonly',
+                'https://www.googleapis.com/auth/userinfo.email',
+            ),
+            scopes=["1", "2"],
+            default_host="dataflow.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize("transport_class", [transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport])
+def test_messages_v1_beta3_grpc_transport_client_cert_source_for_mtls(
+    transport_class
+):
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_messages_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MessagesV1Beta3RestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_messages_v1_beta3_host_no_port(transport_name): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_messages_v1_beta3_host_with_port(transport_name): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_messages_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MessagesV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = MessagesV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_job_messages._session + session2 = client2.transport.list_job_messages._session + assert session1 != session2 +def test_messages_v1_beta3_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
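+    # A caller-supplied grpc.Channel is adopted as-is: the transport performs no
+    # credential lookup and no TLS setup, which is why _ssl_channel_credentials
+    # stays None below. Illustrative sketch only (not executed by this test):
+    #     channel = grpc.insecure_channel('localhost:8080')
+    #     transport = transports.MessagesV1Beta3GrpcTransport(channel=channel)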
+    transport = transports.MessagesV1Beta3GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_messages_v1_beta3_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MessagesV1Beta3GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport])
+def test_messages_v1_beta3_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
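+# Note: the modern replacements for these deprecated arguments are
+# ssl_channel_credentials and client_cert_source_for_mtls (exercised by the
+# client_cert_source_for_mtls tests above); this pair is kept only until the
+# deprecated constructor arguments are dropped.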
+@pytest.mark.parametrize("transport_class", [transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport]) +def test_messages_v1_beta3_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MessagesV1Beta3Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = MessagesV1Beta3Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MessagesV1Beta3Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = MessagesV1Beta3Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = MessagesV1Beta3Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MessagesV1Beta3Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MessagesV1Beta3Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = MessagesV1Beta3Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MessagesV1Beta3Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = MessagesV1Beta3Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = MessagesV1Beta3Client.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MessagesV1Beta3Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MessagesV1Beta3Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = MessagesV1Beta3Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MessagesV1Beta3Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MessagesV1Beta3Transport, '_prep_wrapped_messages') as prep: + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MessagesV1Beta3Transport, '_prep_wrapped_messages') as prep: + transport_class = MessagesV1Beta3Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MessagesV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = MessagesV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
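+        # The client implements the context-manager protocol: __exit__ delegates
+        # to transport.close(), which closes the gRPC channel or the REST
+        # requests.Session, so the mocked close() must fire exactly when the
+        # ``with`` block exits.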
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport), + (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py new file mode 100644 index 0000000..60df478 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py @@ -0,0 +1,2477 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3AsyncClient
+from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3Client
+from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers
+from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import transports
+from google.cloud.dataflow_v1beta3.types import metrics
+from google.oauth2 import service_account
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
+
+
+def test__get_default_mtls_endpoint():
+    api_endpoint = "example.googleapis.com"
+    api_mtls_endpoint = "example.mtls.googleapis.com"
+    sandbox_endpoint = "example.sandbox.googleapis.com"
+    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+    non_googleapi = "api.example.com"
+
+    assert MetricsV1Beta3Client._get_default_mtls_endpoint(None) is None
+    assert MetricsV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
+    assert MetricsV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
+    assert MetricsV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
+    assert MetricsV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
+    assert MetricsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (MetricsV1Beta3Client, "grpc"),
+    (MetricsV1Beta3AsyncClient, "grpc_asyncio"),
+    (MetricsV1Beta3Client, "rest"),
+])
+def test_metrics_v1_beta3_client_from_service_account_info(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
+        factory.return_value = creds
+        info = {"valid": True}
+        client = client_class.from_service_account_info(info, transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'dataflow.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://dataflow.googleapis.com'
+        )
+
+
+@pytest.mark.parametrize("transport_class,transport_name", [
+    (transports.MetricsV1Beta3GrpcTransport, "grpc"),
+    (transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
+    (transports.MetricsV1Beta3RestTransport, "rest"),
+])
+def test_metrics_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name):
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=True)
+        use_jwt.assert_called_once_with(True)
+
+    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
+        creds = service_account.Credentials(None, None, None)
+        transport = transport_class(credentials=creds, always_use_jwt_access=False)
+        use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize("client_class,transport_name", [
+    (MetricsV1Beta3Client, "grpc"),
+    (MetricsV1Beta3AsyncClient, "grpc_asyncio"),
+    (MetricsV1Beta3Client, "rest"),
+])
+def test_metrics_v1_beta3_client_from_service_account_file(client_class, transport_name):
+    creds = ga_credentials.AnonymousCredentials()
+    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
+        factory.return_value = creds
+        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
+        assert client.transport._credentials == creds
+        assert isinstance(client, client_class)
+
+        assert client.transport._host == (
+            'dataflow.googleapis.com:443'
+            if transport_name in ['grpc', 'grpc_asyncio']
+            else
+            'https://dataflow.googleapis.com'
+        )
+
+
+def test_metrics_v1_beta3_client_get_transport_class():
+    transport = MetricsV1Beta3Client.get_transport_class()
+    available_transports = [
+        transports.MetricsV1Beta3GrpcTransport,
+        transports.MetricsV1Beta3RestTransport,
+    ]
+    assert transport in available_transports
+
+    transport = MetricsV1Beta3Client.get_transport_class("grpc")
+    assert transport == transports.MetricsV1Beta3GrpcTransport
+
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name", [
+    (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc"),
+    (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
+    (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"),
+])
+@mock.patch.object(MetricsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3Client))
+@mock.patch.object(MetricsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3AsyncClient))
+def test_metrics_v1_beta3_client_client_options(client_class, transport_class, transport_name):
+    # Check that if channel is provided we won't create a new one.
+    with mock.patch.object(MetricsV1Beta3Client, 'get_transport_class') as gtc:
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+        client = client_class(transport=transport)
+        gtc.assert_not_called()
+
+    # Check that if channel is provided via str we will create a new one.
+    with mock.patch.object(MetricsV1Beta3Client, 'get_transport_class') as gtc:
+        client = client_class(transport=transport_name)
+        gtc.assert_called()
+
+    # Check the case api_endpoint is provided.
+    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(transport=transport_name, client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host="squid.clam.whelk",
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+    # "always".
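+    # For reference, GOOGLE_API_USE_MTLS_ENDPOINT accepts three values:
+    # "never" always uses the plain endpoint, "always" always uses the mTLS
+    # endpoint, and "auto" (the default) picks the mTLS endpoint only when a
+    # client certificate is configured.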
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", "true"), + (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", "false"), + (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "true"), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "false"), +]) +@mock.patch.object(MetricsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3Client)) +@mock.patch.object(MetricsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metrics_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
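+    # GOOGLE_API_USE_CLIENT_CERTIFICATE gates client certificates entirely:
+    # "true" permits a cert (from client_options.client_cert_source or the ADC
+    # default cert source); "false", the default, disables certs, so the plain
+    # endpoint is kept even under GOOGLE_API_USE_MTLS_ENDPOINT=auto.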
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + MetricsV1Beta3Client, MetricsV1Beta3AsyncClient +]) +@mock.patch.object(MetricsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3Client)) +@mock.patch.object(MetricsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3AsyncClient)) +def test_metrics_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc"), + (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), +]) +def test_metrics_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", grpc_helpers), + (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", None), +]) +def test_metrics_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_metrics_v1_beta3_client_client_options_from_dict(): + with mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = MetricsV1Beta3Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", grpc_helpers), + (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_metrics_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
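+    # The flow under test: client_options.credentials_file is forwarded to the
+    # transport, which loads the file via google.auth.load_credentials_from_file();
+    # those file-based credentials (not ADC) must then reach create_channel.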
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=None, + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + metrics.GetJobMetricsRequest, + dict, +]) +def test_get_job_metrics(request_type, transport: str = 'grpc'): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metrics.JobMetrics( + ) + response = client.get_job_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetJobMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metrics.JobMetrics) + + +def test_get_job_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_metrics), + '__call__') as call: + client.get_job_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetJobMetricsRequest() + +@pytest.mark.asyncio +async def test_get_job_metrics_async(transport: str = 'grpc_asyncio', request_type=metrics.GetJobMetricsRequest): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. 
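+        # The async client awaits the stub call, so the mocked return value must
+        # be awaitable; grpc_helpers_async.FakeUnaryUnaryCall wraps a plain
+        # message in an object that can be awaited like a real UnaryUnaryCall.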
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobMetrics( + )) + response = await client.get_job_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetJobMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, metrics.JobMetrics) + + +@pytest.mark.asyncio +async def test_get_job_metrics_async_from_dict(): + await test_get_job_metrics_async(request_type=dict) + + +def test_get_job_metrics_field_headers(): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metrics.GetJobMetricsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_metrics), + '__call__') as call: + call.return_value = metrics.JobMetrics() + client.get_job_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_metrics_field_headers_async(): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metrics.GetJobMetricsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_metrics), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobMetrics()) + await client.get_job_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + metrics.GetJobExecutionDetailsRequest, + dict, +]) +def test_get_job_execution_details(request_type, transport: str = 'grpc'): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + # Designate an appropriate return value for the call. 
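+        # Note that get_job_execution_details() wraps the raw JobExecutionDetails
+        # response in a GetJobExecutionDetailsPager, so the assertion below
+        # checks the pager type rather than the proto message itself.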
+ call.return_value = metrics.JobExecutionDetails( + next_page_token='next_page_token_value', + ) + response = client.get_job_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetJobExecutionDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetJobExecutionDetailsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_get_job_execution_details_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + client.get_job_execution_details() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetJobExecutionDetailsRequest() + +@pytest.mark.asyncio +async def test_get_job_execution_details_async(transport: str = 'grpc_asyncio', request_type=metrics.GetJobExecutionDetailsRequest): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobExecutionDetails( + next_page_token='next_page_token_value', + )) + response = await client.get_job_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetJobExecutionDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetJobExecutionDetailsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_get_job_execution_details_async_from_dict(): + await test_get_job_execution_details_async(request_type=dict) + + +def test_get_job_execution_details_field_headers(): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metrics.GetJobExecutionDetailsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + call.return_value = metrics.JobExecutionDetails() + client.get_job_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
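+    # The routed request fields are URL-encoded into a single metadata entry,
+    # e.g. ('x-goog-request-params',
+    #       'project_id=my-project&location=us-central1&job_id=123')
+    # (values illustrative only; the exact expected string is asserted below).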
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_execution_details_field_headers_async(): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metrics.GetJobExecutionDetailsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobExecutionDetails()) + await client.get_job_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +def test_get_job_execution_details_pager(transport_name: str = "grpc"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token='abc', + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token='def', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token='ghi', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('project_id', ''), + ('location', ''), + ('job_id', ''), + )), + ) + pager = client.get_job_execution_details(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.StageSummary) + for i in results) +def test_get_job_execution_details_pages(transport_name: str = "grpc"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__') as call: + # Set the response to a series of pages. 
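+        # Pages chain via next_page_token; the final page's empty token ends
+        # iteration, and the trailing RuntimeError acts as a sentinel that would
+        # surface if the pager ever requested a fifth page.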
+ call.side_effect = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token='abc', + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token='def', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token='ghi', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + RuntimeError, + ) + pages = list(client.get_job_execution_details(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_get_job_execution_details_async_pager(): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token='abc', + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token='def', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token='ghi', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + RuntimeError, + ) + async_pager = await client.get_job_execution_details(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, metrics.StageSummary) + for i in responses) + + +@pytest.mark.asyncio +async def test_get_job_execution_details_async_pages(): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_execution_details), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token='abc', + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token='def', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token='ghi', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.get_job_execution_details(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + metrics.GetStageExecutionDetailsRequest, + dict, +]) +def test_get_stage_execution_details(request_type, transport: str = 'grpc'): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stage_execution_details), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = metrics.StageExecutionDetails( + next_page_token='next_page_token_value', + ) + response = client.get_stage_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetStageExecutionDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetStageExecutionDetailsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_get_stage_execution_details_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stage_execution_details), + '__call__') as call: + client.get_stage_execution_details() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetStageExecutionDetailsRequest() + +@pytest.mark.asyncio +async def test_get_stage_execution_details_async(transport: str = 'grpc_asyncio', request_type=metrics.GetStageExecutionDetailsRequest): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stage_execution_details), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metrics.StageExecutionDetails( + next_page_token='next_page_token_value', + )) + response = await client.get_stage_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metrics.GetStageExecutionDetailsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.GetStageExecutionDetailsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_get_stage_execution_details_async_from_dict(): + await test_get_stage_execution_details_async(request_type=dict) + + +def test_get_stage_execution_details_field_headers(): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metrics.GetStageExecutionDetailsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + request.stage_id = 'stage_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_stage_execution_details), + '__call__') as call: + call.return_value = metrics.StageExecutionDetails() + client.get_stage_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value&stage_id=stage_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_stage_execution_details_field_headers_async(): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metrics.GetStageExecutionDetailsRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.job_id = 'job_id_value' + request.stage_id = 'stage_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stage_execution_details), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metrics.StageExecutionDetails()) + await client.get_stage_execution_details(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value&stage_id=stage_id_value', + ) in kw['metadata'] + + +def test_get_stage_execution_details_pager(transport_name: str = "grpc"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stage_execution_details), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + next_page_token='abc', + ), + metrics.StageExecutionDetails( + workers=[], + next_page_token='def', + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token='ghi', + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('project_id', ''), + ('location', ''), + ('job_id', ''), + ('stage_id', ''), + )), + ) + pager = client.get_stage_execution_details(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.WorkerDetails) + for i in results) +def test_get_stage_execution_details_pages(transport_name: str = "grpc"): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_stage_execution_details),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metrics.StageExecutionDetails(
+                workers=[
+                    metrics.WorkerDetails(),
+                    metrics.WorkerDetails(),
+                    metrics.WorkerDetails(),
+                ],
+                next_page_token='abc',
+            ),
+            metrics.StageExecutionDetails(
+                workers=[],
+                next_page_token='def',
+            ),
+            metrics.StageExecutionDetails(
+                workers=[
+                    metrics.WorkerDetails(),
+                ],
+                next_page_token='ghi',
+            ),
+            metrics.StageExecutionDetails(
+                workers=[
+                    metrics.WorkerDetails(),
+                    metrics.WorkerDetails(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.get_stage_execution_details(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_get_stage_execution_details_async_pager():
+    client = MetricsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stage_execution_details),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metrics.StageExecutionDetails(
+                workers=[
+                    metrics.WorkerDetails(),
+                    metrics.WorkerDetails(),
+                    metrics.WorkerDetails(),
+                ],
+                next_page_token='abc',
+            ),
+            metrics.StageExecutionDetails(
+                workers=[],
+                next_page_token='def',
+            ),
+            metrics.StageExecutionDetails(
+                workers=[
+                    metrics.WorkerDetails(),
+                ],
+                next_page_token='ghi',
+            ),
+            metrics.StageExecutionDetails(
+                workers=[
+                    metrics.WorkerDetails(),
+                    metrics.WorkerDetails(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.get_stage_execution_details(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metrics.WorkerDetails)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_get_stage_execution_details_async_pages():
+    client = MetricsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stage_execution_details),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
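+        # The trailing RuntimeError makes the test fail loudly if the client requests more pages than were staged.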
+ call.side_effect = ( + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + next_page_token='abc', + ), + metrics.StageExecutionDetails( + workers=[], + next_page_token='def', + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token='ghi', + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.get_stage_execution_details(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + metrics.GetJobMetricsRequest, + dict, +]) +def test_get_job_metrics_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metrics.JobMetrics( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.JobMetrics.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_job_metrics(request) + + # Establish that the response is the type that we expect. 
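+    # JobMetrics has no scalar fields set above, so the type check is the substance of this assertion.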
+    assert isinstance(response, metrics.JobMetrics)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_job_metrics_rest_interceptors(null_interceptor):
+    transport = transports.MetricsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.MetricsV1Beta3RestInterceptor(),
+    )
+    client = MetricsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "post_get_job_metrics") as post, \
+         mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "pre_get_job_metrics") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metrics.GetJobMetricsRequest.pb(metrics.GetJobMetricsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metrics.JobMetrics.to_json(metrics.JobMetrics())
+
+        request = metrics.GetJobMetricsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metrics.JobMetrics()
+
+        client.get_job_metrics(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_job_metrics_rest_bad_request(transport: str = 'rest', request_type=metrics.GetJobMetricsRequest):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_job_metrics(request)
+
+
+def test_get_job_metrics_rest_error():
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    metrics.GetJobExecutionDetailsRequest,
+    dict,
+])
+def test_get_job_execution_details_rest(request_type):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
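+        # A non-empty page token lets the test verify that the REST pager surfaces it.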
+        return_value = metrics.JobExecutionDetails(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = metrics.JobExecutionDetails.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get_job_execution_details(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.GetJobExecutionDetailsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_job_execution_details_rest_interceptors(null_interceptor):
+    transport = transports.MetricsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.MetricsV1Beta3RestInterceptor(),
+    )
+    client = MetricsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "post_get_job_execution_details") as post, \
+         mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "pre_get_job_execution_details") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metrics.GetJobExecutionDetailsRequest.pb(metrics.GetJobExecutionDetailsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metrics.JobExecutionDetails.to_json(metrics.JobExecutionDetails())
+
+        request = metrics.GetJobExecutionDetailsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metrics.JobExecutionDetails()
+
+        client.get_job_execution_details(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_job_execution_details_rest_bad_request(transport: str = 'rest', request_type=metrics.GetJobExecutionDetailsRequest):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_job_execution_details(request)
+
+
+def test_get_job_execution_details_rest_pager(transport: str = 'rest'):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
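+        # Note: the staged pages below are doubled because the pager is consumed twice in this test.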
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + metrics.StageSummary(), + ], + next_page_token='abc', + ), + metrics.JobExecutionDetails( + stages=[], + next_page_token='def', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + ], + next_page_token='ghi', + ), + metrics.JobExecutionDetails( + stages=[ + metrics.StageSummary(), + metrics.StageSummary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metrics.JobExecutionDetails.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} + + pager = client.get_job_execution_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.StageSummary) + for i in results) + + pages = list(client.get_job_execution_details(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + metrics.GetStageExecutionDetailsRequest, + dict, +]) +def test_get_stage_execution_details_rest(request_type): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3', 'stage_id': 'sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = metrics.StageExecutionDetails( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = metrics.StageExecutionDetails.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_stage_execution_details(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, pagers.GetStageExecutionDetailsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_stage_execution_details_rest_interceptors(null_interceptor):
+    transport = transports.MetricsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.MetricsV1Beta3RestInterceptor(),
+    )
+    client = MetricsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "post_get_stage_execution_details") as post, \
+         mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "pre_get_stage_execution_details") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = metrics.GetStageExecutionDetailsRequest.pb(metrics.GetStageExecutionDetailsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = metrics.StageExecutionDetails.to_json(metrics.StageExecutionDetails())
+
+        request = metrics.GetStageExecutionDetailsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = metrics.StageExecutionDetails()
+
+        client.get_stage_execution_details(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_stage_execution_details_rest_bad_request(transport: str = 'rest', request_type=metrics.GetStageExecutionDetailsRequest):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3', 'stage_id': 'sample4'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_stage_execution_details(request)
+
+
+def test_get_stage_execution_details_rest_pager(transport: str = 'rest'):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
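+        # As in the job-level pager test above, the staged pages are doubled for the two consuming calls.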
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + next_page_token='abc', + ), + metrics.StageExecutionDetails( + workers=[], + next_page_token='def', + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + ], + next_page_token='ghi', + ), + metrics.StageExecutionDetails( + workers=[ + metrics.WorkerDetails(), + metrics.WorkerDetails(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(metrics.StageExecutionDetails.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3', 'stage_id': 'sample4'} + + pager = client.get_stage_execution_details(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, metrics.WorkerDetails) + for i in results) + + pages = list(client.get_stage_execution_details(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetricsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetricsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsV1Beta3Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MetricsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsV1Beta3Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsV1Beta3Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetricsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsV1Beta3Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
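+    # The client must adopt the given transport instance itself rather than construct a new one.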
+ transport = transports.MetricsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetricsV1Beta3Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsV1Beta3GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetricsV1Beta3GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetricsV1Beta3GrpcTransport, + transports.MetricsV1Beta3GrpcAsyncIOTransport, + transports.MetricsV1Beta3RestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = MetricsV1Beta3Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsV1Beta3GrpcTransport, + ) + +def test_metrics_v1_beta3_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metrics_v1_beta3_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetricsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
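+    # The base class only defines the interface; concrete transports override these stubs.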
+ methods = ( + 'get_job_metrics', + 'get_job_execution_details', + 'get_stage_execution_details', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metrics_v1_beta3_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsV1Beta3Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id="octopus", + ) + + +def test_metrics_v1_beta3_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsV1Beta3Transport() + adc.assert_called_once() + + +def test_metrics_v1_beta3_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsV1Beta3Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsV1Beta3GrpcTransport, + transports.MetricsV1Beta3GrpcAsyncIOTransport, + ], +) +def test_metrics_v1_beta3_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
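+    # Explicit scopes are forwarded unchanged, while default_scopes come from the service definition.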
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsV1Beta3GrpcTransport, + transports.MetricsV1Beta3GrpcAsyncIOTransport, + transports.MetricsV1Beta3RestTransport, + ], +) +def test_metrics_v1_beta3_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsV1Beta3GrpcTransport, grpc_helpers), + (transports.MetricsV1Beta3GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metrics_v1_beta3_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=["1", "2"], + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport]) +def test_metrics_v1_beta3_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
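+    # create_channel is mocked out, so no network connection is ever attempted.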
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+def test_metrics_v1_beta3_http_transport_client_cert_source_for_mtls():
+    cred = ga_credentials.AnonymousCredentials()
+    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
+        transports.MetricsV1Beta3RestTransport(
+            credentials=cred,
+            client_cert_source_for_mtls=client_cert_source_callback
+        )
+        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_metrics_v1_beta3_host_no_port(transport_name):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataflow.googleapis.com:443'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataflow.googleapis.com'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+    "rest",
+])
+def test_metrics_v1_beta3_host_with_port(transport_name):
+    client = MetricsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'dataflow.googleapis.com:8000'
+        if transport_name in ['grpc', 'grpc_asyncio']
+        else 'https://dataflow.googleapis.com:8000'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "rest",
+])
+def test_metrics_v1_beta3_client_transport_session_collision(transport_name):
+    creds1 = ga_credentials.AnonymousCredentials()
+    creds2 = ga_credentials.AnonymousCredentials()
+    client1 = MetricsV1Beta3Client(
+        credentials=creds1,
+        transport=transport_name,
+    )
+    client2 = MetricsV1Beta3Client(
+        credentials=creds2,
+        transport=transport_name,
+    )
+    session1 = client1.transport.get_job_metrics._session
+    session2 = client2.transport.get_job_metrics._session
+    assert session1 != session2
+    session1 = client1.transport.get_job_execution_details._session
+    session2 = client2.transport.get_job_execution_details._session
+    assert session1 != session2
+    session1 = client1.transport.get_stage_execution_details._session
+    session2 = client2.transport.get_stage_execution_details._session
+    assert session1 != session2
+
+
+def test_metrics_v1_beta3_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetricsV1Beta3GrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_metrics_v1_beta3_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.MetricsV1Beta3GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport])
+def test_metrics_v1_beta3_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport]) +def test_metrics_v1_beta3_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetricsV1Beta3Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = MetricsV1Beta3Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsV1Beta3Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetricsV1Beta3Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = MetricsV1Beta3Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsV1Beta3Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetricsV1Beta3Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = MetricsV1Beta3Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsV1Beta3Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = MetricsV1Beta3Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = MetricsV1Beta3Client.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricsV1Beta3Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetricsV1Beta3Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = MetricsV1Beta3Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsV1Beta3Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetricsV1Beta3Transport, '_prep_wrapped_messages') as prep: + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetricsV1Beta3Transport, '_prep_wrapped_messages') as prep: + transport_class = MetricsV1Beta3Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = MetricsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = MetricsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
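+        # Entering the context must not close the transport; leaving it must.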
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport), + (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py new file mode 100644 index 0000000..785bf64 --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py @@ -0,0 +1,2013 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import SnapshotsV1Beta3AsyncClient
+from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import SnapshotsV1Beta3Client
+from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import transports
+from google.cloud.dataflow_v1beta3.types import snapshots
+from google.oauth2 import service_account
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+import google.auth
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(None) is None + assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SnapshotsV1Beta3Client, "grpc"), + (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), + (SnapshotsV1Beta3Client, "rest"), +]) +def test_snapshots_v1_beta3_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SnapshotsV1Beta3GrpcTransport, "grpc"), + (transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SnapshotsV1Beta3RestTransport, "rest"), +]) +def test_snapshots_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SnapshotsV1Beta3Client, "grpc"), + (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), + (SnapshotsV1Beta3Client, "rest"), +]) +def test_snapshots_v1_beta3_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, 
client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +def test_snapshots_v1_beta3_client_get_transport_class(): + transport = SnapshotsV1Beta3Client.get_transport_class() + available_transports = [ + transports.SnapshotsV1Beta3GrpcTransport, + transports.SnapshotsV1Beta3RestTransport, + ] + assert transport in available_transports + + transport = SnapshotsV1Beta3Client.get_transport_class("grpc") + assert transport == transports.SnapshotsV1Beta3GrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc"), + (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), +]) +@mock.patch.object(SnapshotsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3Client)) +@mock.patch.object(SnapshotsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3AsyncClient)) +def test_snapshots_v1_beta3_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SnapshotsV1Beta3Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SnapshotsV1Beta3Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
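+    # With "always", the mTLS endpoint is expected even though no client certificate is configured.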
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        with mock.patch.object(transport_class, '__init__') as patched:
+            patched.return_value = None
+            client = client_class(transport=transport_name)
+            patched.assert_called_once_with(
+                credentials=None,
+                credentials_file=None,
+                host=client.DEFAULT_MTLS_ENDPOINT,
+                scopes=None,
+                client_cert_source_for_mtls=None,
+                quota_project_id=None,
+                client_info=transports.base.DEFAULT_CLIENT_INFO,
+                always_use_jwt_access=True,
+                api_audience=None,
+            )
+
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class(transport=transport_name)
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError):
+            client = client_class(transport=transport_name)
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", "true"),
+    (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", "false"),
+    (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"),
+    (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest", "true"),
+    (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest", "false"),
+])
+@mock.patch.object(SnapshotsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3Client))
+@mock.patch.object(SnapshotsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3AsyncClient))
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_snapshots_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + SnapshotsV1Beta3Client, SnapshotsV1Beta3AsyncClient +]) +@mock.patch.object(SnapshotsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3Client)) +@mock.patch.object(SnapshotsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3AsyncClient)) +def test_snapshots_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
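+    # An explicit api_endpoint in the options should win, and the provided cert source should be returned.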
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc"), + (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), +]) +def test_snapshots_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
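+    # Scopes set through ClientOptions should be forwarded verbatim to the
+    # transport constructor.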
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", grpc_helpers), + (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest", None), +]) +def test_snapshots_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_snapshots_v1_beta3_client_client_options_from_dict(): + with mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = SnapshotsV1Beta3Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", grpc_helpers), + (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_snapshots_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
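+    # create_channel should receive the credentials loaded from the file,
+    # not the ADC credentials returned by google.auth.default.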
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=None, + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + snapshots.GetSnapshotRequest, + dict, +]) +def test_get_snapshot(request_type, transport: str = 'grpc'): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = snapshots.Snapshot( + id='id_value', + project_id='project_id_value', + source_job_id='source_job_id_value', + state=snapshots.SnapshotState.PENDING, + description='description_value', + disk_size_bytes=1611, + region='region_value', + ) + response = client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == snapshots.GetSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, snapshots.Snapshot) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.source_job_id == 'source_job_id_value' + assert response.state == snapshots.SnapshotState.PENDING + assert response.description == 'description_value' + assert response.disk_size_bytes == 1611 + assert response.region == 'region_value' + + +def test_get_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
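+    # Calling the method with no arguments should still send a default
+    # GetSnapshotRequest.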
+    with mock.patch.object(
+            type(client.transport.get_snapshot),
+            '__call__') as call:
+        client.get_snapshot()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.GetSnapshotRequest()
+
+@pytest.mark.asyncio
+async def test_get_snapshot_async(transport: str = 'grpc_asyncio', request_type=snapshots.GetSnapshotRequest):
+    client = SnapshotsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_snapshot),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot(
+            id='id_value',
+            project_id='project_id_value',
+            source_job_id='source_job_id_value',
+            state=snapshots.SnapshotState.PENDING,
+            description='description_value',
+            disk_size_bytes=1611,
+            region='region_value',
+        ))
+        response = await client.get_snapshot(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.GetSnapshotRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, snapshots.Snapshot)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.source_job_id == 'source_job_id_value'
+    assert response.state == snapshots.SnapshotState.PENDING
+    assert response.description == 'description_value'
+    assert response.disk_size_bytes == 1611
+    assert response.region == 'region_value'
+
+
+@pytest.mark.asyncio
+async def test_get_snapshot_async_from_dict():
+    await test_get_snapshot_async(request_type=dict)
+
+
+def test_get_snapshot_field_headers():
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = snapshots.GetSnapshotRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+    request.snapshot_id = 'snapshot_id_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_snapshot),
+            '__call__') as call:
+        call.return_value = snapshots.Snapshot()
+        client.get_snapshot(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_snapshot_field_headers_async():
+    client = SnapshotsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = snapshots.GetSnapshotRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+    request.snapshot_id = 'snapshot_id_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
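+    # The async surface should attach the same x-goog-request-params routing
+    # header as the sync client.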
+    with mock.patch.object(
+            type(client.transport.get_snapshot),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot())
+        await client.get_snapshot(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.parametrize("request_type", [
+  snapshots.DeleteSnapshotRequest,
+  dict,
+])
+def test_delete_snapshot(request_type, transport: str = 'grpc'):
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_snapshot),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = snapshots.DeleteSnapshotResponse(
+        )
+        response = client.delete_snapshot(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.DeleteSnapshotRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, snapshots.DeleteSnapshotResponse)
+
+
+def test_delete_snapshot_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_snapshot),
+            '__call__') as call:
+        client.delete_snapshot()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.DeleteSnapshotRequest()
+
+@pytest.mark.asyncio
+async def test_delete_snapshot_async(transport: str = 'grpc_asyncio', request_type=snapshots.DeleteSnapshotRequest):
+    client = SnapshotsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_snapshot),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.DeleteSnapshotResponse(
+        ))
+        response = await client.delete_snapshot(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.DeleteSnapshotRequest()
+
+    # Establish that the response is the type that we expect.
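+    # DeleteSnapshotResponse carries no fields, so the type check is the only
+    # meaningful assertion.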
+ assert isinstance(response, snapshots.DeleteSnapshotResponse) + + +@pytest.mark.asyncio +async def test_delete_snapshot_async_from_dict(): + await test_delete_snapshot_async(request_type=dict) + + +def test_delete_snapshot_field_headers(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = snapshots.DeleteSnapshotRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.snapshot_id = 'snapshot_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + call.return_value = snapshots.DeleteSnapshotResponse() + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_snapshot_field_headers_async(): + client = SnapshotsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = snapshots.DeleteSnapshotRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + request.snapshot_id = 'snapshot_id_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.DeleteSnapshotResponse()) + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + snapshots.ListSnapshotsRequest, + dict, +]) +def test_list_snapshots(request_type, transport: str = 'grpc'): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = snapshots.ListSnapshotsResponse( + ) + response = client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == snapshots.ListSnapshotsRequest() + + # Establish that the response is the type that we expect. 
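+    # An empty ListSnapshotsResponse is a valid reply, so only the type is
+    # asserted.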
+    assert isinstance(response, snapshots.ListSnapshotsResponse)
+
+
+def test_list_snapshots_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_snapshots),
+            '__call__') as call:
+        client.list_snapshots()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.ListSnapshotsRequest()
+
+@pytest.mark.asyncio
+async def test_list_snapshots_async(transport: str = 'grpc_asyncio', request_type=snapshots.ListSnapshotsRequest):
+    client = SnapshotsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_snapshots),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.ListSnapshotsResponse(
+        ))
+        response = await client.list_snapshots(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == snapshots.ListSnapshotsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, snapshots.ListSnapshotsResponse)
+
+
+@pytest.mark.asyncio
+async def test_list_snapshots_async_from_dict():
+    await test_list_snapshots_async(request_type=dict)
+
+
+def test_list_snapshots_field_headers():
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = snapshots.ListSnapshotsRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+    request.job_id = 'job_id_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_snapshots),
+            '__call__') as call:
+        call.return_value = snapshots.ListSnapshotsResponse()
+        client.list_snapshots(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value&job_id=job_id_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_snapshots_field_headers_async():
+    client = SnapshotsV1Beta3AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = snapshots.ListSnapshotsRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+    request.job_id = 'job_id_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.ListSnapshotsResponse()) + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value&job_id=job_id_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + snapshots.GetSnapshotRequest, + dict, +]) +def test_get_snapshot_rest(request_type): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = snapshots.Snapshot( + id='id_value', + project_id='project_id_value', + source_job_id='source_job_id_value', + state=snapshots.SnapshotState.PENDING, + description='description_value', + disk_size_bytes=1611, + region='region_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = snapshots.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. 
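+    # Field-by-field checks confirm the payload survived the JSON round trip
+    # through the REST transcoding layer.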
+    assert isinstance(response, snapshots.Snapshot)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.source_job_id == 'source_job_id_value'
+    assert response.state == snapshots.SnapshotState.PENDING
+    assert response.description == 'description_value'
+    assert response.disk_size_bytes == 1611
+    assert response.region == 'region_value'
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_snapshot_rest_interceptors(null_interceptor):
+    transport = transports.SnapshotsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SnapshotsV1Beta3RestInterceptor(),
+        )
+    client = SnapshotsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "post_get_snapshot") as post, \
+         mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "pre_get_snapshot") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = snapshots.GetSnapshotRequest.pb(snapshots.GetSnapshotRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot())
+
+        request = snapshots.GetSnapshotRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = snapshots.Snapshot()
+
+        client.get_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_snapshot_rest_bad_request(transport: str = 'rest', request_type=snapshots.GetSnapshotRequest):
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_snapshot(request)
+
+
+def test_get_snapshot_rest_error():
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+  snapshots.DeleteSnapshotRequest,
+  dict,
+])
+def test_delete_snapshot_rest(request_type):
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
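+        # DeleteSnapshotResponse has no fields, so an empty message is a
+        # sufficient fake.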
+        return_value = snapshots.DeleteSnapshotResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = snapshots.DeleteSnapshotResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.delete_snapshot(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, snapshots.DeleteSnapshotResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_snapshot_rest_interceptors(null_interceptor):
+    transport = transports.SnapshotsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SnapshotsV1Beta3RestInterceptor(),
+        )
+    client = SnapshotsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "post_delete_snapshot") as post, \
+         mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "pre_delete_snapshot") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = snapshots.DeleteSnapshotRequest.pb(snapshots.DeleteSnapshotRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = snapshots.DeleteSnapshotResponse.to_json(snapshots.DeleteSnapshotResponse())
+
+        request = snapshots.DeleteSnapshotRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = snapshots.DeleteSnapshotResponse()
+
+        client.delete_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_delete_snapshot_rest_bad_request(transport: str = 'rest', request_type=snapshots.DeleteSnapshotRequest):
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
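+    # A 400 status should surface as core_exceptions.BadRequest before any
+    # response parsing takes place.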
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_snapshot(request)
+
+
+def test_delete_snapshot_rest_error():
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+  snapshots.ListSnapshotsRequest,
+  dict,
+])
+def test_list_snapshots_rest(request_type):
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = snapshots.ListSnapshotsResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = snapshots.ListSnapshotsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list_snapshots(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, snapshots.ListSnapshotsResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_snapshots_rest_interceptors(null_interceptor):
+    transport = transports.SnapshotsV1Beta3RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.SnapshotsV1Beta3RestInterceptor(),
+        )
+    client = SnapshotsV1Beta3Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "post_list_snapshots") as post, \
+         mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "pre_list_snapshots") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = snapshots.ListSnapshotsRequest.pb(snapshots.ListSnapshotsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = snapshots.ListSnapshotsResponse.to_json(snapshots.ListSnapshotsResponse())
+
+        request = snapshots.ListSnapshotsRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = snapshots.ListSnapshotsResponse()
+
+        client.list_snapshots(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_snapshots_rest_bad_request(transport: str = 'rest', request_type=snapshots.ListSnapshotsRequest):
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_snapshots(request)
+
+
+def test_list_snapshots_rest_error():
+    client = SnapshotsV1Beta3Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.SnapshotsV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = SnapshotsV1Beta3Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.SnapshotsV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = SnapshotsV1Beta3Client(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a transport instance.
+    transport = transports.SnapshotsV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    options = client_options.ClientOptions()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = SnapshotsV1Beta3Client(
+            client_options=options,
+            transport=transport,
+        )
+
+    # It is an error to provide an api_key and a credential.
+    options = mock.Mock()
+    options.api_key = "api_key"
+    with pytest.raises(ValueError):
+        client = SnapshotsV1Beta3Client(
+            client_options=options,
+            credentials=ga_credentials.AnonymousCredentials()
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.SnapshotsV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = SnapshotsV1Beta3Client(
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
+        )
+
+
+def test_transport_instance():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.SnapshotsV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = SnapshotsV1Beta3Client(transport=transport)
+    assert client.transport is transport
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.SnapshotsV1Beta3GrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.SnapshotsV1Beta3GrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+@pytest.mark.parametrize("transport_class", [
+    transports.SnapshotsV1Beta3GrpcTransport,
+    transports.SnapshotsV1Beta3GrpcAsyncIOTransport,
+    transports.SnapshotsV1Beta3RestTransport,
+])
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
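+    # Each transport flavor should fall back to google.auth.default() exactly
+    # once when constructed without explicit credentials.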
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = SnapshotsV1Beta3Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SnapshotsV1Beta3GrpcTransport, + ) + +def test_snapshots_v1_beta3_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SnapshotsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_snapshots_v1_beta3_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.SnapshotsV1Beta3Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'get_snapshot', + 'delete_snapshot', + 'list_snapshots', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_snapshots_v1_beta3_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SnapshotsV1Beta3Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id="octopus", + ) + + +def test_snapshots_v1_beta3_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SnapshotsV1Beta3Transport() + adc.assert_called_once() + + +def test_snapshots_v1_beta3_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SnapshotsV1Beta3Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SnapshotsV1Beta3GrpcTransport, + transports.SnapshotsV1Beta3GrpcAsyncIOTransport, + ], +) +def test_snapshots_v1_beta3_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SnapshotsV1Beta3GrpcTransport, + transports.SnapshotsV1Beta3GrpcAsyncIOTransport, + transports.SnapshotsV1Beta3RestTransport, + ], +) +def test_snapshots_v1_beta3_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SnapshotsV1Beta3GrpcTransport, grpc_helpers), + (transports.SnapshotsV1Beta3GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_snapshots_v1_beta3_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
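+    # The channel should then be created against the default Dataflow host,
+    # with the caller's scopes passed alongside the service defaults.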
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=["1", "2"], + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport]) +def test_snapshots_v1_beta3_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_snapshots_v1_beta3_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.SnapshotsV1Beta3RestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_snapshots_v1_beta3_host_no_port(transport_name): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_snapshots_v1_beta3_host_with_port(transport_name): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_snapshots_v1_beta3_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SnapshotsV1Beta3Client( + credentials=creds1, + transport=transport_name, + ) + client2 = SnapshotsV1Beta3Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_snapshot._session + session2 = client2.transport.get_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete_snapshot._session + session2 = client2.transport.delete_snapshot._session + assert session1 != session2 + session1 = client1.transport.list_snapshots._session + session2 = client2.transport.list_snapshots._session + assert session1 != session2 +def test_snapshots_v1_beta3_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SnapshotsV1Beta3GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_snapshots_v1_beta3_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+    transport = transports.SnapshotsV1Beta3GrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport])
+def test_snapshots_v1_beta3_transport_channel_mtls_with_client_cert_source(
+    transport_class
+):
+    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
+        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, 'default') as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport]) +def test_snapshots_v1_beta3_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SnapshotsV1Beta3Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SnapshotsV1Beta3Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotsV1Beta3Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = SnapshotsV1Beta3Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SnapshotsV1Beta3Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotsV1Beta3Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SnapshotsV1Beta3Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SnapshotsV1Beta3Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotsV1Beta3Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = SnapshotsV1Beta3Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SnapshotsV1Beta3Client.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SnapshotsV1Beta3Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SnapshotsV1Beta3Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SnapshotsV1Beta3Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SnapshotsV1Beta3Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SnapshotsV1Beta3Transport, '_prep_wrapped_messages') as prep: + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SnapshotsV1Beta3Transport, '_prep_wrapped_messages') as prep: + transport_class = SnapshotsV1Beta3Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SnapshotsV1Beta3AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = SnapshotsV1Beta3Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
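+        # Using the client as a context manager should close the transport on
+        # exit, and only on exit.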
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport), + (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py new file mode 100644 index 0000000..00d9c6d --- /dev/null +++ b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py @@ -0,0 +1,2059 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+# try/except added for compatibility with python < 3.8
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    import mock
+
+import grpc
+from grpc.experimental import aio
+from collections.abc import Iterable
+from google.protobuf import json_format
+import json
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+from proto.marshal.rules import wrappers
+from requests import Response
+from requests import Request, PreparedRequest
+from requests.sessions import Session
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import path_template
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.dataflow_v1beta3.services.templates_service import TemplatesServiceAsyncClient
+from google.cloud.dataflow_v1beta3.services.templates_service import TemplatesServiceClient
+from google.cloud.dataflow_v1beta3.services.templates_service import transports
+from google.cloud.dataflow_v1beta3.types import environment
+from google.cloud.dataflow_v1beta3.types import jobs
+from google.cloud.dataflow_v1beta3.types import templates
+from google.oauth2 import service_account
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.rpc import status_pb2  # type: ignore
+import google.auth
+
+
+def client_cert_source_callback():
+    return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
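+# (Concretely: any DEFAULT_ENDPOINT containing "localhost" is swapped for
+# "foo.googleapis.com", so a distinct DEFAULT_MTLS_ENDPOINT can be derived;
+# any other endpoint is returned unchanged.)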
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TemplatesServiceClient._get_default_mtls_endpoint(None) is None + assert TemplatesServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TemplatesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TemplatesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TemplatesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TemplatesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TemplatesServiceClient, "grpc"), + (TemplatesServiceAsyncClient, "grpc_asyncio"), + (TemplatesServiceClient, "rest"), +]) +def test_templates_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TemplatesServiceGrpcTransport, "grpc"), + (transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.TemplatesServiceRestTransport, "rest"), +]) +def test_templates_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TemplatesServiceClient, "grpc"), + (TemplatesServiceAsyncClient, "grpc_asyncio"), + (TemplatesServiceClient, "rest"), +]) +def test_templates_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, 
client_class) + + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://dataflow.googleapis.com' + ) + + +def test_templates_service_client_get_transport_class(): + transport = TemplatesServiceClient.get_transport_class() + available_transports = [ + transports.TemplatesServiceGrpcTransport, + transports.TemplatesServiceRestTransport, + ] + assert transport in available_transports + + transport = TemplatesServiceClient.get_transport_class("grpc") + assert transport == transports.TemplatesServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc"), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), +]) +@mock.patch.object(TemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceClient)) +@mock.patch.object(TemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceAsyncClient)) +def test_templates_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TemplatesServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TemplatesServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
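+    # ("always" forces the mTLS endpoint even though no client certificate is
+    # configured, so the transport should receive DEFAULT_MTLS_ENDPOINT.)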
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", "true"), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", "false"), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest", "true"), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest", "false"), +]) +@mock.patch.object(TemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceClient)) +@mock.patch.object(TemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_templates_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
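+    # Sketch of the matrix exercised below (with GOOGLE_API_USE_MTLS_ENDPOINT="auto"):
+    #   cert source available, env "true"  -> DEFAULT_MTLS_ENDPOINT, cert forwarded
+    #   cert source available, env "false" -> DEFAULT_ENDPOINT, no cert
+    #   no cert source at all              -> DEFAULT_ENDPOINT, no cert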
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TemplatesServiceClient, TemplatesServiceAsyncClient +]) +@mock.patch.object(TemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceClient)) +@mock.patch.object(TemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceAsyncClient)) +def test_templates_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
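+    # (An explicit client_cert_source in ClientOptions should be returned
+    # unchanged, together with the api_endpoint from the same options.)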
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc"), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), +]) +def test_templates_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
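+    # (Scopes passed through ClientOptions are expected to reach the transport
+    # constructor verbatim, alongside the service's default scopes.)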
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", grpc_helpers), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest", None), +]) +def test_templates_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_templates_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = TemplatesServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", grpc_helpers), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_templates_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
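+    # (google.auth.load_credentials_from_file is mocked below, so "credentials.json"
+    # never needs to exist; the loaded credentials should then be handed to create_channel.)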
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=None, + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + templates.CreateJobFromTemplateRequest, + dict, +]) +def test_create_job_from_template(request_type, transport: str = 'grpc'): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_from_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = jobs.Job( + id='id_value', + project_id='project_id_value', + name='name_value', + type_=environment.JobType.JOB_TYPE_BATCH, + steps_location='steps_location_value', + current_state=jobs.JobState.JOB_STATE_STOPPED, + requested_state=jobs.JobState.JOB_STATE_STOPPED, + replace_job_id='replace_job_id_value', + client_request_id='client_request_id_value', + replaced_by_job_id='replaced_by_job_id_value', + temp_files=['temp_files_value'], + location='location_value', + created_from_snapshot_id='created_from_snapshot_id_value', + satisfies_pzs=True, + ) + response = client.create_job_from_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == templates.CreateJobFromTemplateRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, jobs.Job)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.name == 'name_value'
+    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
+    assert response.steps_location == 'steps_location_value'
+    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.replace_job_id == 'replace_job_id_value'
+    assert response.client_request_id == 'client_request_id_value'
+    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
+    assert response.temp_files == ['temp_files_value']
+    assert response.location == 'location_value'
+    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
+    assert response.satisfies_pzs is True
+
+
+def test_create_job_from_template_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job_from_template),
+            '__call__') as call:
+        client.create_job_from_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.CreateJobFromTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_create_job_from_template_async(transport: str = 'grpc_asyncio', request_type=templates.CreateJobFromTemplateRequest):
+    client = TemplatesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_job_from_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(
+            id='id_value',
+            project_id='project_id_value',
+            name='name_value',
+            type_=environment.JobType.JOB_TYPE_BATCH,
+            steps_location='steps_location_value',
+            current_state=jobs.JobState.JOB_STATE_STOPPED,
+            requested_state=jobs.JobState.JOB_STATE_STOPPED,
+            replace_job_id='replace_job_id_value',
+            client_request_id='client_request_id_value',
+            replaced_by_job_id='replaced_by_job_id_value',
+            temp_files=['temp_files_value'],
+            location='location_value',
+            created_from_snapshot_id='created_from_snapshot_id_value',
+            satisfies_pzs=True,
+        ))
+        response = await client.create_job_from_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.CreateJobFromTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, jobs.Job) + assert response.id == 'id_value' + assert response.project_id == 'project_id_value' + assert response.name == 'name_value' + assert response.type_ == environment.JobType.JOB_TYPE_BATCH + assert response.steps_location == 'steps_location_value' + assert response.current_state == jobs.JobState.JOB_STATE_STOPPED + assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED + assert response.replace_job_id == 'replace_job_id_value' + assert response.client_request_id == 'client_request_id_value' + assert response.replaced_by_job_id == 'replaced_by_job_id_value' + assert response.temp_files == ['temp_files_value'] + assert response.location == 'location_value' + assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_create_job_from_template_async_from_dict(): + await test_create_job_from_template_async(request_type=dict) + + +def test_create_job_from_template_field_headers(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = templates.CreateJobFromTemplateRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_from_template), + '__call__') as call: + call.return_value = jobs.Job() + client.create_job_from_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_from_template_field_headers_async(): + client = TemplatesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = templates.CreateJobFromTemplateRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_from_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) + await client.create_job_from_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + templates.LaunchTemplateRequest, + dict, +]) +def test_launch_template(request_type, transport: str = 'grpc'): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.launch_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = templates.LaunchTemplateResponse(
+        )
+        response = client.launch_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.LaunchTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, templates.LaunchTemplateResponse)
+
+
+def test_launch_template_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.launch_template),
+            '__call__') as call:
+        client.launch_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.LaunchTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_launch_template_async(transport: str = 'grpc_asyncio', request_type=templates.LaunchTemplateRequest):
+    client = TemplatesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.launch_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchTemplateResponse(
+        ))
+        response = await client.launch_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.LaunchTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, templates.LaunchTemplateResponse)
+
+
+@pytest.mark.asyncio
+async def test_launch_template_async_from_dict():
+    await test_launch_template_async(request_type=dict)
+
+
+def test_launch_template_field_headers():
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = templates.LaunchTemplateRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.launch_template),
+            '__call__') as call:
+        call.return_value = templates.LaunchTemplateResponse()
+        client.launch_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
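+    # (The routing header is the 'x-goog-request-params' metadata entry,
+    # formatted as '&'-joined key=value pairs taken from the request fields.)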
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_launch_template_field_headers_async(): + client = TemplatesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = templates.LaunchTemplateRequest() + + request.project_id = 'project_id_value' + request.location = 'location_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.launch_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchTemplateResponse()) + await client.launch_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project_id=project_id_value&location=location_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + templates.GetTemplateRequest, + dict, +]) +def test_get_template(request_type, transport: str = 'grpc'): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = templates.GetTemplateResponse( + template_type=templates.GetTemplateResponse.TemplateType.LEGACY, + ) + response = client.get_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == templates.GetTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, templates.GetTemplateResponse) + assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY + + +def test_get_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_template), + '__call__') as call: + client.get_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == templates.GetTemplateRequest() + +@pytest.mark.asyncio +async def test_get_template_async(transport: str = 'grpc_asyncio', request_type=templates.GetTemplateRequest): + client = TemplatesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.get_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.GetTemplateResponse(
+            template_type=templates.GetTemplateResponse.TemplateType.LEGACY,
+        ))
+        response = await client.get_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == templates.GetTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, templates.GetTemplateResponse)
+    assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY
+
+
+@pytest.mark.asyncio
+async def test_get_template_async_from_dict():
+    await test_get_template_async(request_type=dict)
+
+
+def test_get_template_field_headers():
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = templates.GetTemplateRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_template),
+            '__call__') as call:
+        call.return_value = templates.GetTemplateResponse()
+        client.get_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_template_field_headers_async():
+    client = TemplatesServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = templates.GetTemplateRequest()
+
+    request.project_id = 'project_id_value'
+    request.location = 'location_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_template),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.GetTemplateResponse())
+        await client.get_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'project_id=project_id_value&location=location_value',
+    ) in kw['metadata']
+
+
+@pytest.mark.parametrize("request_type", [
+    templates.CreateJobFromTemplateRequest,
+    dict,
+])
+def test_create_job_from_template_rest(request_type):
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = jobs.Job(
+            id='id_value',
+            project_id='project_id_value',
+            name='name_value',
+            type_=environment.JobType.JOB_TYPE_BATCH,
+            steps_location='steps_location_value',
+            current_state=jobs.JobState.JOB_STATE_STOPPED,
+            requested_state=jobs.JobState.JOB_STATE_STOPPED,
+            replace_job_id='replace_job_id_value',
+            client_request_id='client_request_id_value',
+            replaced_by_job_id='replaced_by_job_id_value',
+            temp_files=['temp_files_value'],
+            location='location_value',
+            created_from_snapshot_id='created_from_snapshot_id_value',
+            satisfies_pzs=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = jobs.Job.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.create_job_from_template(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, jobs.Job)
+    assert response.id == 'id_value'
+    assert response.project_id == 'project_id_value'
+    assert response.name == 'name_value'
+    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
+    assert response.steps_location == 'steps_location_value'
+    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
+    assert response.replace_job_id == 'replace_job_id_value'
+    assert response.client_request_id == 'client_request_id_value'
+    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
+    assert response.temp_files == ['temp_files_value']
+    assert response.location == 'location_value'
+    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
+    assert response.satisfies_pzs is True
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_job_from_template_rest_interceptors(null_interceptor):
+    transport = transports.TemplatesServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TemplatesServiceRestInterceptor(),
+    )
+    client = TemplatesServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.TemplatesServiceRestInterceptor, "post_create_job_from_template") as post, \
+         mock.patch.object(transports.TemplatesServiceRestInterceptor, "pre_create_job_from_template") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = templates.CreateJobFromTemplateRequest.pb(templates.CreateJobFromTemplateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = jobs.Job.to_json(jobs.Job())
+
+        request = templates.CreateJobFromTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = jobs.Job()
+
+        client.create_job_from_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_job_from_template_rest_bad_request(transport: str = 'rest', request_type=templates.CreateJobFromTemplateRequest):
+    client = 
TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_job_from_template(request) + + +def test_create_job_from_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + templates.LaunchTemplateRequest, + dict, +]) +def test_launch_template_rest(request_type): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2'} + request_init["launch_parameters"] = {'job_name': 'job_name_value', 'parameters': {}, 'environment': {'num_workers': 1212, 'max_workers': 1202, 'zone': 'zone_value', 'service_account_email': 'service_account_email_value', 'temp_location': 'temp_location_value', 'bypass_temp_dir_validation': True, 'machine_type': 'machine_type_value', 'additional_experiments': ['additional_experiments_value1', 'additional_experiments_value2'], 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'additional_user_labels': {}, 'kms_key_name': 'kms_key_name_value', 'ip_configuration': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'enable_streaming_engine': True}, 'update': True, 'transform_name_mapping': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = templates.LaunchTemplateResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = templates.LaunchTemplateResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.launch_template(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, templates.LaunchTemplateResponse)
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_launch_template_rest_interceptors(null_interceptor):
+    transport = transports.TemplatesServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TemplatesServiceRestInterceptor(),
+    )
+    client = TemplatesServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.TemplatesServiceRestInterceptor, "post_launch_template") as post, \
+         mock.patch.object(transports.TemplatesServiceRestInterceptor, "pre_launch_template") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = templates.LaunchTemplateRequest.pb(templates.LaunchTemplateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = templates.LaunchTemplateResponse.to_json(templates.LaunchTemplateResponse())
+
+        request = templates.LaunchTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = templates.LaunchTemplateResponse()
+
+        client.launch_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_launch_template_rest_bad_request(transport: str = 'rest', request_type=templates.LaunchTemplateRequest):
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2'}
+    request_init["launch_parameters"] = {'job_name': 'job_name_value', 'parameters': {}, 'environment': {'num_workers': 1212, 'max_workers': 1202, 'zone': 'zone_value', 'service_account_email': 'service_account_email_value', 'temp_location': 'temp_location_value', 'bypass_temp_dir_validation': True, 'machine_type': 'machine_type_value', 'additional_experiments': ['additional_experiments_value1', 'additional_experiments_value2'], 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'additional_user_labels': {}, 'kms_key_name': 'kms_key_name_value', 'ip_configuration': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'enable_streaming_engine': True}, 'update': True, 'transform_name_mapping': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
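+    # (Session.request is patched at the class level, so the 400 response is
+    # seen by the transport's session and surfaced as core_exceptions.BadRequest.)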
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.launch_template(request)
+
+
+def test_launch_template_rest_error():
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    templates.GetTemplateRequest,
+    dict,
+])
+def test_get_template_rest(request_type):
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'project_id': 'sample1', 'location': 'sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = templates.GetTemplateResponse(
+            template_type=templates.GetTemplateResponse.TemplateType.LEGACY,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = templates.GetTemplateResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get_template(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, templates.GetTemplateResponse)
+    assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_template_rest_interceptors(null_interceptor):
+    transport = transports.TemplatesServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.TemplatesServiceRestInterceptor(),
+    )
+    client = TemplatesServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.TemplatesServiceRestInterceptor, "post_get_template") as post, \
+         mock.patch.object(transports.TemplatesServiceRestInterceptor, "pre_get_template") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = templates.GetTemplateRequest.pb(templates.GetTemplateRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = templates.GetTemplateResponse.to_json(templates.GetTemplateResponse())
+
+        request = templates.GetTemplateRequest()
+        metadata = [
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = templates.GetTemplateResponse()
+
+        client.get_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_template_rest_bad_request(transport: str = 'rest', request_type=templates.GetTemplateRequest):
+    client = TemplatesServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'project_id': 'sample1', 'location': 'sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_template(request) + + +def test_get_template_rest_error(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TemplatesServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TemplatesServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TemplatesServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TemplatesServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TemplatesServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TemplatesServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TemplatesServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.TemplatesServiceGrpcTransport, + transports.TemplatesServiceGrpcAsyncIOTransport, + transports.TemplatesServiceRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
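+    # (Constructing the transport with no credentials should fall back to
+    # google.auth.default(), patched below to return anonymous credentials.)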
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "rest", +]) +def test_transport_kind(transport_name): + transport = TemplatesServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TemplatesServiceGrpcTransport, + ) + +def test_templates_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TemplatesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_templates_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TemplatesServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_job_from_template', + 'launch_template', + 'get_template', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_templates_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TemplatesServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id="octopus", + ) + + +def test_templates_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
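+    # _prep_wrapped_messages is patched out below so the base transport can be
+    # constructed without wrapping each RPC method; only the fallback to
+    # google.auth.default is exercised here.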
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TemplatesServiceTransport() + adc.assert_called_once() + + +def test_templates_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TemplatesServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TemplatesServiceGrpcTransport, + transports.TemplatesServiceGrpcAsyncIOTransport, + ], +) +def test_templates_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TemplatesServiceGrpcTransport, + transports.TemplatesServiceGrpcAsyncIOTransport, + transports.TemplatesServiceRestTransport, + ], +) +def test_templates_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TemplatesServiceGrpcTransport, grpc_helpers), + (transports.TemplatesServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_templates_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
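+    # The assertion below pins the contract with grpc_helpers.create_channel:
+    # the default endpoint "dataflow.googleapis.com:443" and the four default
+    # Dataflow OAuth scopes are filled in for the caller, while explicitly
+    # passed scopes and quota_project_id flow through unchanged.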
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dataflow.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/compute', + 'https://www.googleapis.com/auth/compute.readonly', + 'https://www.googleapis.com/auth/userinfo.email', +), + scopes=["1", "2"], + default_host="dataflow.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport]) +def test_templates_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
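+    # For illustration: a client cert source is simply a callable returning a
+    # (certificate_chain, private_key) pair of bytes; the module-level
+    # client_cert_source_callback helper used below is assumed to look like:
+    #
+    #     def client_cert_source_callback():
+    #         return b"cert bytes", b"key bytes"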
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_templates_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.TemplatesServiceRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_templates_service_host_no_port(transport_name): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_templates_service_host_with_port(transport_name): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'dataflow.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://dataflow.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_templates_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = TemplatesServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = TemplatesServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_job_from_template._session + session2 = client2.transport.create_job_from_template._session + assert session1 != session2 + session1 = client1.transport.launch_template._session + session2 = client2.transport.launch_template._session + assert session1 != session2 + session1 = client1.transport.get_template._session + session2 = client2.transport.get_template._session + assert session1 != session2 +def test_templates_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TemplatesServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_templates_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
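+    # Same contract as the sync gRPC case above, but over grpc.aio: a
+    # caller-provided channel must be adopted as-is, and no ssl channel
+    # credentials may be attached on the caller's behalf.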
+ transport = transports.TemplatesServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport]) +def test_templates_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport]) +def test_templates_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TemplatesServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TemplatesServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TemplatesServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TemplatesServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TemplatesServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TemplatesServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TemplatesServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TemplatesServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TemplatesServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TemplatesServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TemplatesServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
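+    # Round trip, for example: common_project_path("mussel") returns
+    # "projects/mussel", which parse_common_project_path maps back to
+    # {"project": "mussel"}.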
+ actual = TemplatesServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TemplatesServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TemplatesServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = TemplatesServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TemplatesServiceTransport, '_prep_wrapped_messages') as prep: + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TemplatesServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = TemplatesServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TemplatesServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "rest": "_session", + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = TemplatesServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
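+        # This mirrors the recommended application pattern (a sketch; creds
+        # stands in for real credentials):
+        #
+        #     with TemplatesServiceClient(credentials=creds, transport="rest") as client:
+        #         ...  # transport.close() runs automatically on exit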
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport), + (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 5e2957b25a96b5a74c120348db9dbcd3b8708ad9 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 13 Dec 2022 20:57:18 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../flex_templates_service/transports/rest.py | 9 +- .../services/jobs_v1_beta3/transports/rest.py | 39 +- .../messages_v1_beta3/transports/rest.py | 9 +- .../metrics_v1_beta3/transports/rest.py | 19 +- .../snapshots_v1_beta3/transports/rest.py | 19 +- .../templates_service/transports/rest.py | 19 +- owl-bot-staging/v1beta3/.coveragerc | 12 - owl-bot-staging/v1beta3/.flake8 | 33 - owl-bot-staging/v1beta3/MANIFEST.in | 2 - owl-bot-staging/v1beta3/README.rst | 49 - owl-bot-staging/v1beta3/docs/conf.py | 376 -- .../flex_templates_service.rst | 6 - .../docs/dataflow_v1beta3/jobs_v1_beta3.rst | 10 - .../dataflow_v1beta3/messages_v1_beta3.rst | 10 - .../dataflow_v1beta3/metrics_v1_beta3.rst | 10 - .../docs/dataflow_v1beta3/services.rst | 11 - .../dataflow_v1beta3/snapshots_v1_beta3.rst | 6 - .../dataflow_v1beta3/templates_service.rst | 6 - .../v1beta3/docs/dataflow_v1beta3/types.rst | 6 - owl-bot-staging/v1beta3/docs/index.rst | 7 - .../v1beta3/google/cloud/dataflow/__init__.py | 257 -- .../google/cloud/dataflow/gapic_version.py | 16 - .../v1beta3/google/cloud/dataflow/py.typed | 2 - .../google/cloud/dataflow_v1beta3/__init__.py | 258 -- .../dataflow_v1beta3/gapic_metadata.json | 393 -- .../cloud/dataflow_v1beta3/gapic_version.py | 16 - .../google/cloud/dataflow_v1beta3/py.typed | 2 - .../dataflow_v1beta3/services/__init__.py | 15 - .../flex_templates_service/__init__.py | 22 - .../flex_templates_service/async_client.py | 279 -- .../services/flex_templates_service/client.py | 475 --- .../transports/__init__.py | 38 - .../flex_templates_service/transports/base.py | 151 - .../flex_templates_service/transports/grpc.py | 265 -- .../transports/grpc_asyncio.py | 264 -- .../flex_templates_service/transports/rest.py | 294 -- .../services/jobs_v1_beta3/__init__.py | 22 - .../services/jobs_v1_beta3/async_client.py | 825 ---- .../services/jobs_v1_beta3/client.py | 1027 ----- .../services/jobs_v1_beta3/pagers.py | 260 -- .../jobs_v1_beta3/transports/__init__.py | 38 - 
.../services/jobs_v1_beta3/transports/base.py | 236 -- .../services/jobs_v1_beta3/transports/grpc.py | 451 -- .../jobs_v1_beta3/transports/grpc_asyncio.py | 450 -- .../services/jobs_v1_beta3/transports/rest.py | 902 ---- .../services/messages_v1_beta3/__init__.py | 22 - .../messages_v1_beta3/async_client.py | 304 -- .../services/messages_v1_beta3/client.py | 500 --- .../services/messages_v1_beta3/pagers.py | 139 - .../messages_v1_beta3/transports/__init__.py | 38 - .../messages_v1_beta3/transports/base.py | 151 - .../messages_v1_beta3/transports/grpc.py | 273 -- .../transports/grpc_asyncio.py | 272 -- .../messages_v1_beta3/transports/rest.py | 292 -- .../services/metrics_v1_beta3/__init__.py | 22 - .../services/metrics_v1_beta3/async_client.py | 496 --- .../services/metrics_v1_beta3/client.py | 694 ---- .../services/metrics_v1_beta3/pagers.py | 260 -- .../metrics_v1_beta3/transports/__init__.py | 38 - .../metrics_v1_beta3/transports/base.py | 179 - .../metrics_v1_beta3/transports/grpc.py | 332 -- .../transports/grpc_asyncio.py | 331 -- .../metrics_v1_beta3/transports/rest.py | 508 --- .../services/snapshots_v1_beta3/__init__.py | 22 - .../snapshots_v1_beta3/async_client.py | 437 -- .../services/snapshots_v1_beta3/client.py | 635 --- .../snapshots_v1_beta3/transports/__init__.py | 38 - .../snapshots_v1_beta3/transports/base.py | 179 - .../snapshots_v1_beta3/transports/grpc.py | 317 -- .../transports/grpc_asyncio.py | 316 -- .../snapshots_v1_beta3/transports/rest.py | 503 --- .../services/templates_service/__init__.py | 22 - .../templates_service/async_client.py | 446 -- .../services/templates_service/client.py | 644 --- .../templates_service/transports/__init__.py | 38 - .../templates_service/transports/base.py | 180 - .../templates_service/transports/grpc.py | 318 -- .../transports/grpc_asyncio.py | 317 -- .../templates_service/transports/rest.py | 528 --- .../cloud/dataflow_v1beta3/types/__init__.py | 242 -- .../dataflow_v1beta3/types/environment.py | 891 ---- .../cloud/dataflow_v1beta3/types/jobs.py | 1425 ------- .../cloud/dataflow_v1beta3/types/messages.py | 302 -- .../cloud/dataflow_v1beta3/types/metrics.py | 619 --- .../cloud/dataflow_v1beta3/types/snapshots.py | 253 -- .../cloud/dataflow_v1beta3/types/streaming.py | 501 --- .../cloud/dataflow_v1beta3/types/templates.py | 1063 ----- owl-bot-staging/v1beta3/mypy.ini | 3 - owl-bot-staging/v1beta3/noxfile.py | 183 - ...ates_service_launch_flex_template_async.py | 51 - ...lates_service_launch_flex_template_sync.py | 51 - ...obs_v1_beta3_aggregated_list_jobs_async.py | 52 - ...jobs_v1_beta3_aggregated_list_jobs_sync.py | 52 - ...d_jobs_v1_beta3_check_active_jobs_async.py | 51 - ...ed_jobs_v1_beta3_check_active_jobs_sync.py | 51 - ...enerated_jobs_v1_beta3_create_job_async.py | 51 - ...generated_jobs_v1_beta3_create_job_sync.py | 51 - ...3_generated_jobs_v1_beta3_get_job_async.py | 51 - ...a3_generated_jobs_v1_beta3_get_job_sync.py | 51 - ...generated_jobs_v1_beta3_list_jobs_async.py | 52 - ..._generated_jobs_v1_beta3_list_jobs_sync.py | 52 - ...erated_jobs_v1_beta3_snapshot_job_async.py | 51 - ...nerated_jobs_v1_beta3_snapshot_job_sync.py | 51 - ...enerated_jobs_v1_beta3_update_job_async.py | 51 - ...generated_jobs_v1_beta3_update_job_sync.py | 51 - ...ssages_v1_beta3_list_job_messages_async.py | 52 - ...essages_v1_beta3_list_job_messages_sync.py | 52 - ...1_beta3_get_job_execution_details_async.py | 52 - ...v1_beta3_get_job_execution_details_sync.py | 52 - ..._metrics_v1_beta3_get_job_metrics_async.py | 51 - 
...d_metrics_v1_beta3_get_job_metrics_sync.py | 51 - ...beta3_get_stage_execution_details_async.py | 52 - ..._beta3_get_stage_execution_details_sync.py | 52 - ...napshots_v1_beta3_delete_snapshot_async.py | 51 - ...snapshots_v1_beta3_delete_snapshot_sync.py | 51 - ...d_snapshots_v1_beta3_get_snapshot_async.py | 51 - ...ed_snapshots_v1_beta3_get_snapshot_sync.py | 51 - ...snapshots_v1_beta3_list_snapshots_async.py | 51 - ..._snapshots_v1_beta3_list_snapshots_sync.py | 51 - ..._service_create_job_from_template_async.py | 52 - ...s_service_create_job_from_template_sync.py | 52 - ...ed_templates_service_get_template_async.py | 52 - ...ted_templates_service_get_template_sync.py | 52 - ...templates_service_launch_template_async.py | 52 - ..._templates_service_launch_template_sync.py | 52 - ...ppet_metadata_google.dataflow.v1beta3.json | 2769 ------------- .../fixup_dataflow_v1beta3_keywords.py | 193 - owl-bot-staging/v1beta3/setup.py | 90 - .../v1beta3/testing/constraints-3.10.txt | 6 - .../v1beta3/testing/constraints-3.11.txt | 6 - .../v1beta3/testing/constraints-3.7.txt | 9 - .../v1beta3/testing/constraints-3.8.txt | 6 - .../v1beta3/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1beta3/tests/__init__.py | 16 - .../v1beta3/tests/unit/__init__.py | 16 - .../v1beta3/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/dataflow_v1beta3/__init__.py | 16 - .../test_flex_templates_service.py | 1459 ------- .../dataflow_v1beta3/test_jobs_v1_beta3.py | 3644 ----------------- .../test_messages_v1_beta3.py | 1713 -------- .../dataflow_v1beta3/test_metrics_v1_beta3.py | 2477 ----------- .../test_snapshots_v1_beta3.py | 2013 --------- .../test_templates_service.py | 2059 ---------- 143 files changed, 54 insertions(+), 41201 deletions(-) delete mode 100644 owl-bot-staging/v1beta3/.coveragerc delete mode 100644 owl-bot-staging/v1beta3/.flake8 delete mode 100644 owl-bot-staging/v1beta3/MANIFEST.in delete mode 100644 owl-bot-staging/v1beta3/README.rst delete mode 100644 owl-bot-staging/v1beta3/docs/conf.py delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst delete mode 100644 owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst delete mode 100644 owl-bot-staging/v1beta3/docs/index.rst delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py 
delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py delete mode 100644 
owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py delete mode 100644 owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py delete mode 100644 owl-bot-staging/v1beta3/mypy.ini delete mode 100644 owl-bot-staging/v1beta3/noxfile.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py delete mode 100644 
owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py delete mode 100644 
owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py delete mode 100644 owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json delete mode 100644 owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py delete mode 100644 owl-bot-staging/v1beta3/setup.py delete mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1beta3/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1beta3/tests/__init__.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py delete mode 100644 owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py diff --git a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py index ed9312e..9d55045 100644 --- a/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py +++ b/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py @@ -47,10 +47,6 @@ rest_version=requests_version, ) -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - class FlexTemplatesServiceRestInterceptor: """Interceptor for FlexTemplatesService. @@ -67,12 +63,13 @@ class FlexTemplatesServiceRestInterceptor: .. 
code-block:: python class MyCustomFlexTemplatesServiceInterceptor(FlexTemplatesServiceRestInterceptor): - def pre_launch_flex_template(request, metadata): + def pre_launch_flex_template(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_launch_flex_template(response): + def post_launch_flex_template(self, response): logging.log(f"Received response: {response}") + return response transport = FlexTemplatesServiceRestTransport(interceptor=MyCustomFlexTemplatesServiceInterceptor()) client = FlexTemplatesServiceClient(transport=transport) diff --git a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py index b151109..18eaf06 100644 --- a/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py +++ b/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py @@ -47,10 +47,6 @@ rest_version=requests_version, ) -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - class JobsV1Beta3RestInterceptor: """Interceptor for JobsV1Beta3. @@ -67,54 +63,61 @@ class JobsV1Beta3RestInterceptor: .. code-block:: python class MyCustomJobsV1Beta3Interceptor(JobsV1Beta3RestInterceptor): - def pre_aggregated_list_jobs(request, metadata): + def pre_aggregated_list_jobs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_aggregated_list_jobs(response): + def post_aggregated_list_jobs(self, response): logging.log(f"Received response: {response}") + return response - def pre_check_active_jobs(request, metadata): + def pre_check_active_jobs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_check_active_jobs(response): + def post_check_active_jobs(self, response): logging.log(f"Received response: {response}") + return response - def pre_create_job(request, metadata): + def pre_create_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_job(response): + def post_create_job(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_job(request, metadata): + def pre_get_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_job(response): + def post_get_job(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_jobs(request, metadata): + def pre_list_jobs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_jobs(response): + def post_list_jobs(self, response): logging.log(f"Received response: {response}") + return response - def pre_snapshot_job(request, metadata): + def pre_snapshot_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_snapshot_job(response): + def post_snapshot_job(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_job(request, metadata): + def pre_update_job(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_job(response): + def post_update_job(self, response): logging.log(f"Received response: {response}") + return response transport = 
JobsV1Beta3RestTransport(interceptor=MyCustomJobsV1Beta3Interceptor()) client = JobsV1Beta3Client(transport=transport) diff --git a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py index 045698a..c6a61c7 100644 --- a/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py +++ b/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py @@ -47,10 +47,6 @@ rest_version=requests_version, ) -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - class MessagesV1Beta3RestInterceptor: """Interceptor for MessagesV1Beta3. @@ -67,12 +63,13 @@ class MessagesV1Beta3RestInterceptor: .. code-block:: python class MyCustomMessagesV1Beta3Interceptor(MessagesV1Beta3RestInterceptor): - def pre_list_job_messages(request, metadata): + def pre_list_job_messages(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_job_messages(response): + def post_list_job_messages(self, response): logging.log(f"Received response: {response}") + return response transport = MessagesV1Beta3RestTransport(interceptor=MyCustomMessagesV1Beta3Interceptor()) client = MessagesV1Beta3Client(transport=transport) diff --git a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py index 10b73c2..f32c0c9 100644 --- a/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py +++ b/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py @@ -47,10 +47,6 @@ rest_version=requests_version, ) -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - class MetricsV1Beta3RestInterceptor: """Interceptor for MetricsV1Beta3. @@ -67,26 +63,29 @@ class MetricsV1Beta3RestInterceptor: .. 
code-block:: python class MyCustomMetricsV1Beta3Interceptor(MetricsV1Beta3RestInterceptor): - def pre_get_job_execution_details(request, metadata): + def pre_get_job_execution_details(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_job_execution_details(response): + def post_get_job_execution_details(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_job_metrics(request, metadata): + def pre_get_job_metrics(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_job_metrics(response): + def post_get_job_metrics(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_stage_execution_details(request, metadata): + def pre_get_stage_execution_details(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_stage_execution_details(response): + def post_get_stage_execution_details(self, response): logging.log(f"Received response: {response}") + return response transport = MetricsV1Beta3RestTransport(interceptor=MyCustomMetricsV1Beta3Interceptor()) client = MetricsV1Beta3Client(transport=transport) diff --git a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py index 54cbbab..8f32c4b 100644 --- a/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py +++ b/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py @@ -47,10 +47,6 @@ rest_version=requests_version, ) -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - class SnapshotsV1Beta3RestInterceptor: """Interceptor for SnapshotsV1Beta3. @@ -67,26 +63,29 @@ class SnapshotsV1Beta3RestInterceptor: .. 
code-block:: python class MyCustomSnapshotsV1Beta3Interceptor(SnapshotsV1Beta3RestInterceptor): - def pre_delete_snapshot(request, metadata): + def pre_delete_snapshot(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_delete_snapshot(response): + def post_delete_snapshot(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_snapshot(request, metadata): + def pre_get_snapshot(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_snapshot(response): + def post_get_snapshot(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_snapshots(request, metadata): + def pre_list_snapshots(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_snapshots(response): + def post_list_snapshots(self, response): logging.log(f"Received response: {response}") + return response transport = SnapshotsV1Beta3RestTransport(interceptor=MyCustomSnapshotsV1Beta3Interceptor()) client = SnapshotsV1Beta3Client(transport=transport) diff --git a/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py b/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py index be3b8e4..a4ed0c1 100644 --- a/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py +++ b/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py @@ -47,10 +47,6 @@ rest_version=requests_version, ) -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - class TemplatesServiceRestInterceptor: """Interceptor for TemplatesService. @@ -67,26 +63,29 @@ class TemplatesServiceRestInterceptor: .. 
code-block:: python class MyCustomTemplatesServiceInterceptor(TemplatesServiceRestInterceptor): - def pre_create_job_from_template(request, metadata): + def pre_create_job_from_template(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_job_from_template(response): + def post_create_job_from_template(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_template(request, metadata): + def pre_get_template(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_template(response): + def post_get_template(self, response): logging.log(f"Received response: {response}") + return response - def pre_launch_template(request, metadata): + def pre_launch_template(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_launch_template(response): + def post_launch_template(self, response): logging.log(f"Received response: {response}") + return response transport = TemplatesServiceRestTransport(interceptor=MyCustomTemplatesServiceInterceptor()) client = TemplatesServiceClient(transport=transport) diff --git a/owl-bot-staging/v1beta3/.coveragerc b/owl-bot-staging/v1beta3/.coveragerc deleted file mode 100644 index bcaea1e..0000000 --- a/owl-bot-staging/v1beta3/.coveragerc +++ /dev/null @@ -1,12 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dataflow/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1beta3/.flake8 b/owl-bot-staging/v1beta3/.flake8 deleted file mode 100644 index 29227d4..0000000 --- a/owl-bot-staging/v1beta3/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. - **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1beta3/MANIFEST.in b/owl-bot-staging/v1beta3/MANIFEST.in deleted file mode 100644 index 8b3924f..0000000 --- a/owl-bot-staging/v1beta3/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/dataflow *.py -recursive-include google/cloud/dataflow_v1beta3 *.py diff --git a/owl-bot-staging/v1beta3/README.rst b/owl-bot-staging/v1beta3/README.rst deleted file mode 100644 index 28a6ee5..0000000 --- a/owl-bot-staging/v1beta3/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Dataflow API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. 
Enable the Google Cloud Dataflow API.
-4. `Setup Authentication.`_
-
-.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
-.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
-.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
-
-Installation
-~~~~~~~~~~~~
-
-Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
-create isolated Python environments. The basic problem it addresses is one of
-dependencies and versions, and indirectly permissions.
-
-With `virtualenv`_, it's possible to install this library without needing system
-install permissions, and without clashing with the installed system
-dependencies.
-
-.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
-
-
-Mac/Linux
-^^^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    source <your-env>/bin/activate
-    <your-env>/bin/pip install /path/to/library
-
-
-Windows
-^^^^^^^
-
-.. code-block:: console
-
-    python3 -m venv <your-env>
-    <your-env>\Scripts\activate
-    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/owl-bot-staging/v1beta3/docs/conf.py b/owl-bot-staging/v1beta3/docs/conf.py
deleted file mode 100644
index 635d9a4..0000000
--- a/owl-bot-staging/v1beta3/docs/conf.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# google-cloud-dataflow-client documentation build configuration file
-#
-# This file is execfile()d with the current directory set to its
-# containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys
-import os
-import shlex
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath(".."))
-
-__version__ = "0.1.0"
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "4.0.1"
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = [
-    "sphinx.ext.autodoc",
-    "sphinx.ext.autosummary",
-    "sphinx.ext.intersphinx",
-    "sphinx.ext.coverage",
-    "sphinx.ext.napoleon",
-    "sphinx.ext.todo",
-    "sphinx.ext.viewcode",
-]
-
-# autodoc/autosummary flags
-autoclass_content = "both"
-autodoc_default_flags = ["members"]
-autosummary_generate = True
-
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-dataflow-client" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. 
-# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dataflow-client-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). 
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-dataflow-client.tex", - u"google-cloud-dataflow-client Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-dataflow-client", - u"Google Cloud Dataflow Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-dataflow-client", - u"google-cloud-dataflow-client Documentation", - author, - "google-cloud-dataflow-client", - "GAPIC library for Google Cloud Dataflow API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst deleted file mode 100644 index 5fc4461..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/flex_templates_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -FlexTemplatesService --------------------------------------- - -.. automodule:: google.cloud.dataflow_v1beta3.services.flex_templates_service - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst deleted file mode 100644 index d2d95a8..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/jobs_v1_beta3.rst +++ /dev/null @@ -1,10 +0,0 @@ -JobsV1Beta3 ------------------------------ - -.. automodule:: google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 - :members: - :inherited-members: - -.. automodule:: google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst deleted file mode 100644 index 0915205..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/messages_v1_beta3.rst +++ /dev/null @@ -1,10 +0,0 @@ -MessagesV1Beta3 ---------------------------------- - -.. automodule:: google.cloud.dataflow_v1beta3.services.messages_v1_beta3 - :members: - :inherited-members: - -.. automodule:: google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst deleted file mode 100644 index 8ca5594..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/metrics_v1_beta3.rst +++ /dev/null @@ -1,10 +0,0 @@ -MetricsV1Beta3 --------------------------------- - -.. automodule:: google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 - :members: - :inherited-members: - -.. 
automodule:: google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst deleted file mode 100644 index d890af6..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/services.rst +++ /dev/null @@ -1,11 +0,0 @@ -Services for Google Cloud Dataflow v1beta3 API -============================================== -.. toctree:: - :maxdepth: 2 - - flex_templates_service - jobs_v1_beta3 - messages_v1_beta3 - metrics_v1_beta3 - snapshots_v1_beta3 - templates_service diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst deleted file mode 100644 index 4619e4d..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/snapshots_v1_beta3.rst +++ /dev/null @@ -1,6 +0,0 @@ -SnapshotsV1Beta3 ----------------------------------- - -.. automodule:: google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst deleted file mode 100644 index ad832aa..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/templates_service.rst +++ /dev/null @@ -1,6 +0,0 @@ -TemplatesService ----------------------------------- - -.. automodule:: google.cloud.dataflow_v1beta3.services.templates_service - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst b/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst deleted file mode 100644 index a768d4a..0000000 --- a/owl-bot-staging/v1beta3/docs/dataflow_v1beta3/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Dataflow v1beta3 API -=========================================== - -.. automodule:: google.cloud.dataflow_v1beta3.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1beta3/docs/index.rst b/owl-bot-staging/v1beta3/docs/index.rst deleted file mode 100644 index 59da2fa..0000000 --- a/owl-bot-staging/v1beta3/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dataflow_v1beta3/services - dataflow_v1beta3/types diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py deleted file mode 100644 index bace5a5..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow/__init__.py +++ /dev/null @@ -1,257 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from google.cloud.dataflow import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.dataflow_v1beta3.services.flex_templates_service.client import FlexTemplatesServiceClient -from google.cloud.dataflow_v1beta3.services.flex_templates_service.async_client import FlexTemplatesServiceAsyncClient -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.client import JobsV1Beta3Client -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.async_client import JobsV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.client import MessagesV1Beta3Client -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.async_client import MessagesV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.client import MetricsV1Beta3Client -from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.async_client import MetricsV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.client import SnapshotsV1Beta3Client -from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.async_client import SnapshotsV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.templates_service.client import TemplatesServiceClient -from google.cloud.dataflow_v1beta3.services.templates_service.async_client import TemplatesServiceAsyncClient - -from google.cloud.dataflow_v1beta3.types.environment import AutoscalingSettings -from google.cloud.dataflow_v1beta3.types.environment import DebugOptions -from google.cloud.dataflow_v1beta3.types.environment import Disk -from google.cloud.dataflow_v1beta3.types.environment import Environment -from google.cloud.dataflow_v1beta3.types.environment import Package -from google.cloud.dataflow_v1beta3.types.environment import SdkHarnessContainerImage -from google.cloud.dataflow_v1beta3.types.environment import TaskRunnerSettings -from google.cloud.dataflow_v1beta3.types.environment import WorkerPool -from google.cloud.dataflow_v1beta3.types.environment import WorkerSettings -from google.cloud.dataflow_v1beta3.types.environment import AutoscalingAlgorithm -from google.cloud.dataflow_v1beta3.types.environment import DefaultPackageSet -from google.cloud.dataflow_v1beta3.types.environment import FlexResourceSchedulingGoal -from google.cloud.dataflow_v1beta3.types.environment import JobType -from google.cloud.dataflow_v1beta3.types.environment import ShuffleMode -from google.cloud.dataflow_v1beta3.types.environment import TeardownPolicy -from google.cloud.dataflow_v1beta3.types.environment import WorkerIPAddressConfiguration -from google.cloud.dataflow_v1beta3.types.jobs import BigQueryIODetails -from google.cloud.dataflow_v1beta3.types.jobs import BigTableIODetails -from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsRequest -from google.cloud.dataflow_v1beta3.types.jobs import CheckActiveJobsResponse -from google.cloud.dataflow_v1beta3.types.jobs import CreateJobRequest -from google.cloud.dataflow_v1beta3.types.jobs import DatastoreIODetails -from google.cloud.dataflow_v1beta3.types.jobs import DisplayData -from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageState -from google.cloud.dataflow_v1beta3.types.jobs import ExecutionStageSummary -from google.cloud.dataflow_v1beta3.types.jobs import FailedLocation -from google.cloud.dataflow_v1beta3.types.jobs import FileIODetails -from google.cloud.dataflow_v1beta3.types.jobs import GetJobRequest -from google.cloud.dataflow_v1beta3.types.jobs import Job -from 
google.cloud.dataflow_v1beta3.types.jobs import JobExecutionInfo -from google.cloud.dataflow_v1beta3.types.jobs import JobExecutionStageInfo -from google.cloud.dataflow_v1beta3.types.jobs import JobMetadata -from google.cloud.dataflow_v1beta3.types.jobs import ListJobsRequest -from google.cloud.dataflow_v1beta3.types.jobs import ListJobsResponse -from google.cloud.dataflow_v1beta3.types.jobs import PipelineDescription -from google.cloud.dataflow_v1beta3.types.jobs import PubSubIODetails -from google.cloud.dataflow_v1beta3.types.jobs import SdkVersion -from google.cloud.dataflow_v1beta3.types.jobs import SnapshotJobRequest -from google.cloud.dataflow_v1beta3.types.jobs import SpannerIODetails -from google.cloud.dataflow_v1beta3.types.jobs import Step -from google.cloud.dataflow_v1beta3.types.jobs import TransformSummary -from google.cloud.dataflow_v1beta3.types.jobs import UpdateJobRequest -from google.cloud.dataflow_v1beta3.types.jobs import JobState -from google.cloud.dataflow_v1beta3.types.jobs import JobView -from google.cloud.dataflow_v1beta3.types.jobs import KindType -from google.cloud.dataflow_v1beta3.types.messages import AutoscalingEvent -from google.cloud.dataflow_v1beta3.types.messages import JobMessage -from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesRequest -from google.cloud.dataflow_v1beta3.types.messages import ListJobMessagesResponse -from google.cloud.dataflow_v1beta3.types.messages import StructuredMessage -from google.cloud.dataflow_v1beta3.types.messages import JobMessageImportance -from google.cloud.dataflow_v1beta3.types.metrics import GetJobExecutionDetailsRequest -from google.cloud.dataflow_v1beta3.types.metrics import GetJobMetricsRequest -from google.cloud.dataflow_v1beta3.types.metrics import GetStageExecutionDetailsRequest -from google.cloud.dataflow_v1beta3.types.metrics import JobExecutionDetails -from google.cloud.dataflow_v1beta3.types.metrics import JobMetrics -from google.cloud.dataflow_v1beta3.types.metrics import MetricStructuredName -from google.cloud.dataflow_v1beta3.types.metrics import MetricUpdate -from google.cloud.dataflow_v1beta3.types.metrics import ProgressTimeseries -from google.cloud.dataflow_v1beta3.types.metrics import StageExecutionDetails -from google.cloud.dataflow_v1beta3.types.metrics import StageSummary -from google.cloud.dataflow_v1beta3.types.metrics import WorkerDetails -from google.cloud.dataflow_v1beta3.types.metrics import WorkItemDetails -from google.cloud.dataflow_v1beta3.types.metrics import ExecutionState -from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotRequest -from google.cloud.dataflow_v1beta3.types.snapshots import DeleteSnapshotResponse -from google.cloud.dataflow_v1beta3.types.snapshots import GetSnapshotRequest -from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsRequest -from google.cloud.dataflow_v1beta3.types.snapshots import ListSnapshotsResponse -from google.cloud.dataflow_v1beta3.types.snapshots import PubsubSnapshotMetadata -from google.cloud.dataflow_v1beta3.types.snapshots import Snapshot -from google.cloud.dataflow_v1beta3.types.snapshots import SnapshotState -from google.cloud.dataflow_v1beta3.types.streaming import ComputationTopology -from google.cloud.dataflow_v1beta3.types.streaming import CustomSourceLocation -from google.cloud.dataflow_v1beta3.types.streaming import DataDiskAssignment -from google.cloud.dataflow_v1beta3.types.streaming import KeyRangeDataDiskAssignment -from google.cloud.dataflow_v1beta3.types.streaming import 
KeyRangeLocation -from google.cloud.dataflow_v1beta3.types.streaming import MountedDataDisk -from google.cloud.dataflow_v1beta3.types.streaming import PubsubLocation -from google.cloud.dataflow_v1beta3.types.streaming import StateFamilyConfig -from google.cloud.dataflow_v1beta3.types.streaming import StreamingApplianceSnapshotConfig -from google.cloud.dataflow_v1beta3.types.streaming import StreamingComputationRanges -from google.cloud.dataflow_v1beta3.types.streaming import StreamingSideInputLocation -from google.cloud.dataflow_v1beta3.types.streaming import StreamingStageLocation -from google.cloud.dataflow_v1beta3.types.streaming import StreamLocation -from google.cloud.dataflow_v1beta3.types.streaming import TopologyConfig -from google.cloud.dataflow_v1beta3.types.templates import ContainerSpec -from google.cloud.dataflow_v1beta3.types.templates import CreateJobFromTemplateRequest -from google.cloud.dataflow_v1beta3.types.templates import DynamicTemplateLaunchParams -from google.cloud.dataflow_v1beta3.types.templates import FlexTemplateRuntimeEnvironment -from google.cloud.dataflow_v1beta3.types.templates import GetTemplateRequest -from google.cloud.dataflow_v1beta3.types.templates import GetTemplateResponse -from google.cloud.dataflow_v1beta3.types.templates import InvalidTemplateParameters -from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateParameter -from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateRequest -from google.cloud.dataflow_v1beta3.types.templates import LaunchFlexTemplateResponse -from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateParameters -from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateRequest -from google.cloud.dataflow_v1beta3.types.templates import LaunchTemplateResponse -from google.cloud.dataflow_v1beta3.types.templates import ParameterMetadata -from google.cloud.dataflow_v1beta3.types.templates import RuntimeEnvironment -from google.cloud.dataflow_v1beta3.types.templates import RuntimeMetadata -from google.cloud.dataflow_v1beta3.types.templates import SDKInfo -from google.cloud.dataflow_v1beta3.types.templates import TemplateMetadata -from google.cloud.dataflow_v1beta3.types.templates import ParameterType - -__all__ = ('FlexTemplatesServiceClient', - 'FlexTemplatesServiceAsyncClient', - 'JobsV1Beta3Client', - 'JobsV1Beta3AsyncClient', - 'MessagesV1Beta3Client', - 'MessagesV1Beta3AsyncClient', - 'MetricsV1Beta3Client', - 'MetricsV1Beta3AsyncClient', - 'SnapshotsV1Beta3Client', - 'SnapshotsV1Beta3AsyncClient', - 'TemplatesServiceClient', - 'TemplatesServiceAsyncClient', - 'AutoscalingSettings', - 'DebugOptions', - 'Disk', - 'Environment', - 'Package', - 'SdkHarnessContainerImage', - 'TaskRunnerSettings', - 'WorkerPool', - 'WorkerSettings', - 'AutoscalingAlgorithm', - 'DefaultPackageSet', - 'FlexResourceSchedulingGoal', - 'JobType', - 'ShuffleMode', - 'TeardownPolicy', - 'WorkerIPAddressConfiguration', - 'BigQueryIODetails', - 'BigTableIODetails', - 'CheckActiveJobsRequest', - 'CheckActiveJobsResponse', - 'CreateJobRequest', - 'DatastoreIODetails', - 'DisplayData', - 'ExecutionStageState', - 'ExecutionStageSummary', - 'FailedLocation', - 'FileIODetails', - 'GetJobRequest', - 'Job', - 'JobExecutionInfo', - 'JobExecutionStageInfo', - 'JobMetadata', - 'ListJobsRequest', - 'ListJobsResponse', - 'PipelineDescription', - 'PubSubIODetails', - 'SdkVersion', - 'SnapshotJobRequest', - 'SpannerIODetails', - 'Step', - 'TransformSummary', - 'UpdateJobRequest', - 'JobState', - 
'JobView', - 'KindType', - 'AutoscalingEvent', - 'JobMessage', - 'ListJobMessagesRequest', - 'ListJobMessagesResponse', - 'StructuredMessage', - 'JobMessageImportance', - 'GetJobExecutionDetailsRequest', - 'GetJobMetricsRequest', - 'GetStageExecutionDetailsRequest', - 'JobExecutionDetails', - 'JobMetrics', - 'MetricStructuredName', - 'MetricUpdate', - 'ProgressTimeseries', - 'StageExecutionDetails', - 'StageSummary', - 'WorkerDetails', - 'WorkItemDetails', - 'ExecutionState', - 'DeleteSnapshotRequest', - 'DeleteSnapshotResponse', - 'GetSnapshotRequest', - 'ListSnapshotsRequest', - 'ListSnapshotsResponse', - 'PubsubSnapshotMetadata', - 'Snapshot', - 'SnapshotState', - 'ComputationTopology', - 'CustomSourceLocation', - 'DataDiskAssignment', - 'KeyRangeDataDiskAssignment', - 'KeyRangeLocation', - 'MountedDataDisk', - 'PubsubLocation', - 'StateFamilyConfig', - 'StreamingApplianceSnapshotConfig', - 'StreamingComputationRanges', - 'StreamingSideInputLocation', - 'StreamingStageLocation', - 'StreamLocation', - 'TopologyConfig', - 'ContainerSpec', - 'CreateJobFromTemplateRequest', - 'DynamicTemplateLaunchParams', - 'FlexTemplateRuntimeEnvironment', - 'GetTemplateRequest', - 'GetTemplateResponse', - 'InvalidTemplateParameters', - 'LaunchFlexTemplateParameter', - 'LaunchFlexTemplateRequest', - 'LaunchFlexTemplateResponse', - 'LaunchTemplateParameters', - 'LaunchTemplateRequest', - 'LaunchTemplateResponse', - 'ParameterMetadata', - 'RuntimeEnvironment', - 'RuntimeMetadata', - 'SDKInfo', - 'TemplateMetadata', - 'ParameterType', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py b/owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py deleted file mode 100644 index 405b1ce..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed b/owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed deleted file mode 100644 index db7ad15..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataflow-client package uses inline types. diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py deleted file mode 100644 index d2ab386..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/__init__.py +++ /dev/null @@ -1,258 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.dataflow import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.flex_templates_service import FlexTemplatesServiceClient -from .services.flex_templates_service import FlexTemplatesServiceAsyncClient -from .services.jobs_v1_beta3 import JobsV1Beta3Client -from .services.jobs_v1_beta3 import JobsV1Beta3AsyncClient -from .services.messages_v1_beta3 import MessagesV1Beta3Client -from .services.messages_v1_beta3 import MessagesV1Beta3AsyncClient -from .services.metrics_v1_beta3 import MetricsV1Beta3Client -from .services.metrics_v1_beta3 import MetricsV1Beta3AsyncClient -from .services.snapshots_v1_beta3 import SnapshotsV1Beta3Client -from .services.snapshots_v1_beta3 import SnapshotsV1Beta3AsyncClient -from .services.templates_service import TemplatesServiceClient -from .services.templates_service import TemplatesServiceAsyncClient - -from .types.environment import AutoscalingSettings -from .types.environment import DebugOptions -from .types.environment import Disk -from .types.environment import Environment -from .types.environment import Package -from .types.environment import SdkHarnessContainerImage -from .types.environment import TaskRunnerSettings -from .types.environment import WorkerPool -from .types.environment import WorkerSettings -from .types.environment import AutoscalingAlgorithm -from .types.environment import DefaultPackageSet -from .types.environment import FlexResourceSchedulingGoal -from .types.environment import JobType -from .types.environment import ShuffleMode -from .types.environment import TeardownPolicy -from .types.environment import WorkerIPAddressConfiguration -from .types.jobs import BigQueryIODetails -from .types.jobs import BigTableIODetails -from .types.jobs import CheckActiveJobsRequest -from .types.jobs import CheckActiveJobsResponse -from .types.jobs import CreateJobRequest -from .types.jobs import DatastoreIODetails -from .types.jobs import DisplayData -from .types.jobs import ExecutionStageState -from .types.jobs import ExecutionStageSummary -from .types.jobs import FailedLocation -from .types.jobs import FileIODetails -from .types.jobs import GetJobRequest -from .types.jobs import Job -from .types.jobs import JobExecutionInfo -from .types.jobs import JobExecutionStageInfo -from .types.jobs import JobMetadata -from .types.jobs import ListJobsRequest -from .types.jobs import ListJobsResponse -from .types.jobs import PipelineDescription -from .types.jobs import PubSubIODetails -from .types.jobs import SdkVersion -from .types.jobs import SnapshotJobRequest -from .types.jobs import SpannerIODetails -from .types.jobs import Step -from .types.jobs import TransformSummary -from .types.jobs import UpdateJobRequest -from .types.jobs import JobState -from .types.jobs import JobView -from .types.jobs import KindType -from .types.messages import AutoscalingEvent -from .types.messages import JobMessage -from .types.messages import ListJobMessagesRequest -from .types.messages import ListJobMessagesResponse -from .types.messages import StructuredMessage -from .types.messages import 
JobMessageImportance -from .types.metrics import GetJobExecutionDetailsRequest -from .types.metrics import GetJobMetricsRequest -from .types.metrics import GetStageExecutionDetailsRequest -from .types.metrics import JobExecutionDetails -from .types.metrics import JobMetrics -from .types.metrics import MetricStructuredName -from .types.metrics import MetricUpdate -from .types.metrics import ProgressTimeseries -from .types.metrics import StageExecutionDetails -from .types.metrics import StageSummary -from .types.metrics import WorkerDetails -from .types.metrics import WorkItemDetails -from .types.metrics import ExecutionState -from .types.snapshots import DeleteSnapshotRequest -from .types.snapshots import DeleteSnapshotResponse -from .types.snapshots import GetSnapshotRequest -from .types.snapshots import ListSnapshotsRequest -from .types.snapshots import ListSnapshotsResponse -from .types.snapshots import PubsubSnapshotMetadata -from .types.snapshots import Snapshot -from .types.snapshots import SnapshotState -from .types.streaming import ComputationTopology -from .types.streaming import CustomSourceLocation -from .types.streaming import DataDiskAssignment -from .types.streaming import KeyRangeDataDiskAssignment -from .types.streaming import KeyRangeLocation -from .types.streaming import MountedDataDisk -from .types.streaming import PubsubLocation -from .types.streaming import StateFamilyConfig -from .types.streaming import StreamingApplianceSnapshotConfig -from .types.streaming import StreamingComputationRanges -from .types.streaming import StreamingSideInputLocation -from .types.streaming import StreamingStageLocation -from .types.streaming import StreamLocation -from .types.streaming import TopologyConfig -from .types.templates import ContainerSpec -from .types.templates import CreateJobFromTemplateRequest -from .types.templates import DynamicTemplateLaunchParams -from .types.templates import FlexTemplateRuntimeEnvironment -from .types.templates import GetTemplateRequest -from .types.templates import GetTemplateResponse -from .types.templates import InvalidTemplateParameters -from .types.templates import LaunchFlexTemplateParameter -from .types.templates import LaunchFlexTemplateRequest -from .types.templates import LaunchFlexTemplateResponse -from .types.templates import LaunchTemplateParameters -from .types.templates import LaunchTemplateRequest -from .types.templates import LaunchTemplateResponse -from .types.templates import ParameterMetadata -from .types.templates import RuntimeEnvironment -from .types.templates import RuntimeMetadata -from .types.templates import SDKInfo -from .types.templates import TemplateMetadata -from .types.templates import ParameterType - -__all__ = ( - 'FlexTemplatesServiceAsyncClient', - 'JobsV1Beta3AsyncClient', - 'MessagesV1Beta3AsyncClient', - 'MetricsV1Beta3AsyncClient', - 'SnapshotsV1Beta3AsyncClient', - 'TemplatesServiceAsyncClient', -'AutoscalingAlgorithm', -'AutoscalingEvent', -'AutoscalingSettings', -'BigQueryIODetails', -'BigTableIODetails', -'CheckActiveJobsRequest', -'CheckActiveJobsResponse', -'ComputationTopology', -'ContainerSpec', -'CreateJobFromTemplateRequest', -'CreateJobRequest', -'CustomSourceLocation', -'DataDiskAssignment', -'DatastoreIODetails', -'DebugOptions', -'DefaultPackageSet', -'DeleteSnapshotRequest', -'DeleteSnapshotResponse', -'Disk', -'DisplayData', -'DynamicTemplateLaunchParams', -'Environment', -'ExecutionStageState', -'ExecutionStageSummary', -'ExecutionState', -'FailedLocation', -'FileIODetails', 
-'FlexResourceSchedulingGoal', -'FlexTemplateRuntimeEnvironment', -'FlexTemplatesServiceClient', -'GetJobExecutionDetailsRequest', -'GetJobMetricsRequest', -'GetJobRequest', -'GetSnapshotRequest', -'GetStageExecutionDetailsRequest', -'GetTemplateRequest', -'GetTemplateResponse', -'InvalidTemplateParameters', -'Job', -'JobExecutionDetails', -'JobExecutionInfo', -'JobExecutionStageInfo', -'JobMessage', -'JobMessageImportance', -'JobMetadata', -'JobMetrics', -'JobState', -'JobType', -'JobView', -'JobsV1Beta3Client', -'KeyRangeDataDiskAssignment', -'KeyRangeLocation', -'KindType', -'LaunchFlexTemplateParameter', -'LaunchFlexTemplateRequest', -'LaunchFlexTemplateResponse', -'LaunchTemplateParameters', -'LaunchTemplateRequest', -'LaunchTemplateResponse', -'ListJobMessagesRequest', -'ListJobMessagesResponse', -'ListJobsRequest', -'ListJobsResponse', -'ListSnapshotsRequest', -'ListSnapshotsResponse', -'MessagesV1Beta3Client', -'MetricStructuredName', -'MetricUpdate', -'MetricsV1Beta3Client', -'MountedDataDisk', -'Package', -'ParameterMetadata', -'ParameterType', -'PipelineDescription', -'ProgressTimeseries', -'PubSubIODetails', -'PubsubLocation', -'PubsubSnapshotMetadata', -'RuntimeEnvironment', -'RuntimeMetadata', -'SDKInfo', -'SdkHarnessContainerImage', -'SdkVersion', -'ShuffleMode', -'Snapshot', -'SnapshotJobRequest', -'SnapshotState', -'SnapshotsV1Beta3Client', -'SpannerIODetails', -'StageExecutionDetails', -'StageSummary', -'StateFamilyConfig', -'Step', -'StreamLocation', -'StreamingApplianceSnapshotConfig', -'StreamingComputationRanges', -'StreamingSideInputLocation', -'StreamingStageLocation', -'StructuredMessage', -'TaskRunnerSettings', -'TeardownPolicy', -'TemplateMetadata', -'TemplatesServiceClient', -'TopologyConfig', -'TransformSummary', -'UpdateJobRequest', -'WorkItemDetails', -'WorkerDetails', -'WorkerIPAddressConfiguration', -'WorkerPool', -'WorkerSettings', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json deleted file mode 100644 index ab8a5b6..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_metadata.json +++ /dev/null @@ -1,393 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dataflow_v1beta3", - "protoPackage": "google.dataflow.v1beta3", - "schema": "1.0", - "services": { - "FlexTemplatesService": { - "clients": { - "grpc": { - "libraryClient": "FlexTemplatesServiceClient", - "rpcs": { - "LaunchFlexTemplate": { - "methods": [ - "launch_flex_template" - ] - } - } - }, - "grpc-async": { - "libraryClient": "FlexTemplatesServiceAsyncClient", - "rpcs": { - "LaunchFlexTemplate": { - "methods": [ - "launch_flex_template" - ] - } - } - }, - "rest": { - "libraryClient": "FlexTemplatesServiceClient", - "rpcs": { - "LaunchFlexTemplate": { - "methods": [ - "launch_flex_template" - ] - } - } - } - } - }, - "JobsV1Beta3": { - "clients": { - "grpc": { - "libraryClient": "JobsV1Beta3Client", - "rpcs": { - "AggregatedListJobs": { - "methods": [ - "aggregated_list_jobs" - ] - }, - "CheckActiveJobs": { - "methods": [ - "check_active_jobs" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "SnapshotJob": { - "methods": [ - "snapshot_job" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - } - } - }, - "grpc-async": { 
- "libraryClient": "JobsV1Beta3AsyncClient", - "rpcs": { - "AggregatedListJobs": { - "methods": [ - "aggregated_list_jobs" - ] - }, - "CheckActiveJobs": { - "methods": [ - "check_active_jobs" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "SnapshotJob": { - "methods": [ - "snapshot_job" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - } - } - }, - "rest": { - "libraryClient": "JobsV1Beta3Client", - "rpcs": { - "AggregatedListJobs": { - "methods": [ - "aggregated_list_jobs" - ] - }, - "CheckActiveJobs": { - "methods": [ - "check_active_jobs" - ] - }, - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - }, - "SnapshotJob": { - "methods": [ - "snapshot_job" - ] - }, - "UpdateJob": { - "methods": [ - "update_job" - ] - } - } - } - } - }, - "MessagesV1Beta3": { - "clients": { - "grpc": { - "libraryClient": "MessagesV1Beta3Client", - "rpcs": { - "ListJobMessages": { - "methods": [ - "list_job_messages" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MessagesV1Beta3AsyncClient", - "rpcs": { - "ListJobMessages": { - "methods": [ - "list_job_messages" - ] - } - } - }, - "rest": { - "libraryClient": "MessagesV1Beta3Client", - "rpcs": { - "ListJobMessages": { - "methods": [ - "list_job_messages" - ] - } - } - } - } - }, - "MetricsV1Beta3": { - "clients": { - "grpc": { - "libraryClient": "MetricsV1Beta3Client", - "rpcs": { - "GetJobExecutionDetails": { - "methods": [ - "get_job_execution_details" - ] - }, - "GetJobMetrics": { - "methods": [ - "get_job_metrics" - ] - }, - "GetStageExecutionDetails": { - "methods": [ - "get_stage_execution_details" - ] - } - } - }, - "grpc-async": { - "libraryClient": "MetricsV1Beta3AsyncClient", - "rpcs": { - "GetJobExecutionDetails": { - "methods": [ - "get_job_execution_details" - ] - }, - "GetJobMetrics": { - "methods": [ - "get_job_metrics" - ] - }, - "GetStageExecutionDetails": { - "methods": [ - "get_stage_execution_details" - ] - } - } - }, - "rest": { - "libraryClient": "MetricsV1Beta3Client", - "rpcs": { - "GetJobExecutionDetails": { - "methods": [ - "get_job_execution_details" - ] - }, - "GetJobMetrics": { - "methods": [ - "get_job_metrics" - ] - }, - "GetStageExecutionDetails": { - "methods": [ - "get_stage_execution_details" - ] - } - } - } - } - }, - "SnapshotsV1Beta3": { - "clients": { - "grpc": { - "libraryClient": "SnapshotsV1Beta3Client", - "rpcs": { - "DeleteSnapshot": { - "methods": [ - "delete_snapshot" - ] - }, - "GetSnapshot": { - "methods": [ - "get_snapshot" - ] - }, - "ListSnapshots": { - "methods": [ - "list_snapshots" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SnapshotsV1Beta3AsyncClient", - "rpcs": { - "DeleteSnapshot": { - "methods": [ - "delete_snapshot" - ] - }, - "GetSnapshot": { - "methods": [ - "get_snapshot" - ] - }, - "ListSnapshots": { - "methods": [ - "list_snapshots" - ] - } - } - }, - "rest": { - "libraryClient": "SnapshotsV1Beta3Client", - "rpcs": { - "DeleteSnapshot": { - "methods": [ - "delete_snapshot" - ] - }, - "GetSnapshot": { - "methods": [ - "get_snapshot" - ] - }, - "ListSnapshots": { - "methods": [ - "list_snapshots" - ] - } - } - } - } - }, - "TemplatesService": { - "clients": { - "grpc": { - "libraryClient": "TemplatesServiceClient", - "rpcs": { - "CreateJobFromTemplate": { - "methods": [ - "create_job_from_template" - ] - }, - "GetTemplate": { - "methods": [ - 
"get_template" - ] - }, - "LaunchTemplate": { - "methods": [ - "launch_template" - ] - } - } - }, - "grpc-async": { - "libraryClient": "TemplatesServiceAsyncClient", - "rpcs": { - "CreateJobFromTemplate": { - "methods": [ - "create_job_from_template" - ] - }, - "GetTemplate": { - "methods": [ - "get_template" - ] - }, - "LaunchTemplate": { - "methods": [ - "launch_template" - ] - } - } - }, - "rest": { - "libraryClient": "TemplatesServiceClient", - "rpcs": { - "CreateJobFromTemplate": { - "methods": [ - "create_job_from_template" - ] - }, - "GetTemplate": { - "methods": [ - "get_template" - ] - }, - "LaunchTemplate": { - "methods": [ - "launch_template" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py deleted file mode 100644 index 405b1ce..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed deleted file mode 100644 index db7ad15..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dataflow-client package uses inline types. diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py deleted file mode 100644 index 107271e..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import FlexTemplatesServiceClient -from .async_client import FlexTemplatesServiceAsyncClient - -__all__ = ( - 'FlexTemplatesServiceClient', - 'FlexTemplatesServiceAsyncClient', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py deleted file mode 100644 index f92bbb5..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/async_client.py +++ /dev/null @@ -1,279 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from .transports.base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport -from .client import FlexTemplatesServiceClient - - -class FlexTemplatesServiceAsyncClient: - """Provides a service for Flex templates. This feature is not - ready yet. 
- """ - - _client: FlexTemplatesServiceClient - - DEFAULT_ENDPOINT = FlexTemplatesServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = FlexTemplatesServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(FlexTemplatesServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(FlexTemplatesServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(FlexTemplatesServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(FlexTemplatesServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(FlexTemplatesServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(FlexTemplatesServiceClient.parse_common_organization_path) - common_project_path = staticmethod(FlexTemplatesServiceClient.common_project_path) - parse_common_project_path = staticmethod(FlexTemplatesServiceClient.parse_common_project_path) - common_location_path = staticmethod(FlexTemplatesServiceClient.common_location_path) - parse_common_location_path = staticmethod(FlexTemplatesServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FlexTemplatesServiceAsyncClient: The constructed client. - """ - return FlexTemplatesServiceClient.from_service_account_info.__func__(FlexTemplatesServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FlexTemplatesServiceAsyncClient: The constructed client. - """ - return FlexTemplatesServiceClient.from_service_account_file.__func__(FlexTemplatesServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. 
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return FlexTemplatesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> FlexTemplatesServiceTransport: - """Returns the transport used by the client instance. - - Returns: - FlexTemplatesServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(FlexTemplatesServiceClient).get_transport_class, type(FlexTemplatesServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, FlexTemplatesServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the flex templates service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.FlexTemplatesServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = FlexTemplatesServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def launch_flex_template(self, - request: Optional[Union[templates.LaunchFlexTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> templates.LaunchFlexTemplateResponse: - r"""Launch a job with a FlexTemplate. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_launch_flex_template(): - # Create a client - client = dataflow_v1beta3.FlexTemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchFlexTemplateRequest( - ) - - # Make the request - response = await client.launch_flex_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest, dict]]): - The request object. A request to launch a Cloud Dataflow - job from a FlexTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse: - Response to the request to launch a - job from Flex Template. - - """ - # Create or coerce a protobuf request object. - request = templates.LaunchFlexTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.launch_flex_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "FlexTemplatesServiceAsyncClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py deleted file mode 100644 index 2c3d168..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/client.py +++ /dev/null @@ -1,475 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
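Both client surfaces in this pair expose the same ``launch_flex_template`` RPC. A minimal sketch of driving the async variant shown above, assuming application-default credentials are available (an empty request is accepted client-side, but a real call would populate ``project_id``, ``location``, and the launch parameter):

.. code-block:: python

    import asyncio

    from google.cloud import dataflow_v1beta3


    async def main() -> None:
        # "async with" closes the underlying grpc_asyncio transport on exit.
        async with dataflow_v1beta3.FlexTemplatesServiceAsyncClient() as client:
            request = dataflow_v1beta3.LaunchFlexTemplateRequest()
            response = await client.launch_flex_template(request=request)
            print(response)


    asyncio.run(main())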
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from .transports.base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import FlexTemplatesServiceGrpcTransport -from .transports.grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport -from .transports.rest import FlexTemplatesServiceRestTransport - - -class FlexTemplatesServiceClientMeta(type): - """Metaclass for the FlexTemplatesService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[FlexTemplatesServiceTransport]] - _transport_registry["grpc"] = FlexTemplatesServiceGrpcTransport - _transport_registry["grpc_asyncio"] = FlexTemplatesServiceGrpcAsyncIOTransport - _transport_registry["rest"] = FlexTemplatesServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[FlexTemplatesServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class FlexTemplatesServiceClient(metaclass=FlexTemplatesServiceClientMeta): - """Provides a service for Flex templates. This feature is not - ready yet. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataflow.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FlexTemplatesServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - FlexTemplatesServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> FlexTemplatesServiceTransport: - """Returns the transport used by the client instance. - - Returns: - FlexTemplatesServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, FlexTemplatesServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the flex templates service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, FlexTemplatesServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, FlexTemplatesServiceTransport): - # transport is a FlexTemplatesServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def launch_flex_template(self, - request: Optional[Union[templates.LaunchFlexTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> templates.LaunchFlexTemplateResponse: - r"""Launch a job with a FlexTemplate. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_launch_flex_template(): - # Create a client - client = dataflow_v1beta3.FlexTemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchFlexTemplateRequest( - ) - - # Make the request - response = client.launch_flex_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest, dict]): - The request object. A request to launch a Cloud Dataflow - job from a FlexTemplate. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse: - Response to the request to launch a - job from Flex Template. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a templates.LaunchFlexTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, templates.LaunchFlexTemplateRequest): - request = templates.LaunchFlexTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.launch_flex_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "FlexTemplatesServiceClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py deleted file mode 100644 index 3688dba..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import FlexTemplatesServiceTransport -from .grpc import FlexTemplatesServiceGrpcTransport -from .grpc_asyncio import FlexTemplatesServiceGrpcAsyncIOTransport -from .rest import FlexTemplatesServiceRestTransport -from .rest import FlexTemplatesServiceRestInterceptor - - -# Compile a registry of transports. 
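-# The same three transports are registered on FlexTemplatesServiceClientMeta, -# whose get_transport_class() falls back to the first entry when no label is -# supplied, so gRPC is the default for direct client construction.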
-_transport_registry = OrderedDict() # type: Dict[str, Type[FlexTemplatesServiceTransport]] -_transport_registry['grpc'] = FlexTemplatesServiceGrpcTransport -_transport_registry['grpc_asyncio'] = FlexTemplatesServiceGrpcAsyncIOTransport -_transport_registry['rest'] = FlexTemplatesServiceRestTransport - -__all__ = ( - 'FlexTemplatesServiceTransport', - 'FlexTemplatesServiceGrpcTransport', - 'FlexTemplatesServiceGrpcAsyncIOTransport', - 'FlexTemplatesServiceRestTransport', - 'FlexTemplatesServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py deleted file mode 100644 index 65c0480..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/base.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataflow_v1beta3.types import templates - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class FlexTemplatesServiceTransport(abc.ABC): - """Abstract transport class for FlexTemplatesService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', - ) - - DEFAULT_HOST: str = 'dataflow.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.launch_flex_template: gapic_v1.method.wrap_method( - self.launch_flex_template, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def launch_flex_template(self) -> Callable[ - [templates.LaunchFlexTemplateRequest], - Union[ - templates.LaunchFlexTemplateResponse, - Awaitable[templates.LaunchFlexTemplateResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'FlexTemplatesServiceTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py deleted file mode 100644 index d953d13..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc.py +++ /dev/null @@ -1,265 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataflow_v1beta3.types import templates -from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO - - -class FlexTemplatesServiceGrpcTransport(FlexTemplatesServiceTransport): - """gRPC backend transport for FlexTemplatesService. - - Provides a service for Flex templates. This feature is not - ready yet. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): An optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def launch_flex_template(self) -> Callable[ - [templates.LaunchFlexTemplateRequest], - templates.LaunchFlexTemplateResponse]: - r"""Return a callable for the launch flex template method over gRPC. - - Launch a job with a FlexTemplate. - - Returns: - Callable[[~.LaunchFlexTemplateRequest], - ~.LaunchFlexTemplateResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
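- # (The stub is cached in self._stubs, so repeated property accesses reuse a - # single channel method; the string below is the fully-qualified RPC name from - # the google.dataflow.v1beta3 service definition.)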
- if 'launch_flex_template' not in self._stubs: - self._stubs['launch_flex_template'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.FlexTemplatesService/LaunchFlexTemplate', - request_serializer=templates.LaunchFlexTemplateRequest.serialize, - response_deserializer=templates.LaunchFlexTemplateResponse.deserialize, - ) - return self._stubs['launch_flex_template'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'FlexTemplatesServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py deleted file mode 100644 index 395fc0a..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,264 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataflow_v1beta3.types import templates -from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import FlexTemplatesServiceGrpcTransport - - -class FlexTemplatesServiceGrpcAsyncIOTransport(FlexTemplatesServiceTransport): - """gRPC AsyncIO backend transport for FlexTemplatesService. - - Provides a service for Flex templates. This feature is not - ready yet. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def launch_flex_template(self) -> Callable[ - [templates.LaunchFlexTemplateRequest], - Awaitable[templates.LaunchFlexTemplateResponse]]: - r"""Return a callable for the launch flex template method over gRPC. - - Launch a job with a FlexTemplate. - - Returns: - Callable[[~.LaunchFlexTemplateRequest], - Awaitable[~.LaunchFlexTemplateResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'launch_flex_template' not in self._stubs: - self._stubs['launch_flex_template'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.FlexTemplatesService/LaunchFlexTemplate', - request_serializer=templates.LaunchFlexTemplateRequest.serialize, - response_deserializer=templates.LaunchFlexTemplateResponse.deserialize, - ) - return self._stubs['launch_flex_template'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'FlexTemplatesServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py deleted file mode 100644 index e3cd96a..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/flex_templates_service/transports/rest.py +++ /dev/null @@ -1,294 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataflow_v1beta3.types import templates - -from .base import FlexTemplatesServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class FlexTemplatesServiceRestInterceptor: - """Interceptor for FlexTemplatesService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the FlexTemplatesServiceRestTransport. - - .. 
code-block:: python - class MyCustomFlexTemplatesServiceInterceptor(FlexTemplatesServiceRestInterceptor): - def pre_launch_flex_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_launch_flex_template(self, response): - logging.log(f"Received response: {response}") - return response - - transport = FlexTemplatesServiceRestTransport(interceptor=MyCustomFlexTemplatesServiceInterceptor()) - client = FlexTemplatesServiceClient(transport=transport) - - - """ - def pre_launch_flex_template(self, request: templates.LaunchFlexTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.LaunchFlexTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for launch_flex_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the FlexTemplatesService server. - """ - return request, metadata - - def post_launch_flex_template(self, response: templates.LaunchFlexTemplateResponse) -> templates.LaunchFlexTemplateResponse: - """Post-rpc interceptor for launch_flex_template - - Override in a subclass to manipulate the response - after it is returned by the FlexTemplatesService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class FlexTemplatesServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: FlexTemplatesServiceRestInterceptor - - -class FlexTemplatesServiceRestTransport(FlexTemplatesServiceTransport): - """REST backend transport for FlexTemplatesService. - - Provides a service for Flex templates. This feature is not - ready yet. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[FlexTemplatesServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or FlexTemplatesServiceRestInterceptor() - self._prep_wrapped_messages(client_info) - - class _LaunchFlexTemplate(FlexTemplatesServiceRestStub): - def __hash__(self): - return hash("LaunchFlexTemplate") - - def __call__(self, - request: templates.LaunchFlexTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> templates.LaunchFlexTemplateResponse: - r"""Call the launch flex template method over HTTP. - - Args: - request (~.templates.LaunchFlexTemplateRequest): - The request object. A request to launch a Cloud Dataflow - job from a FlexTemplate. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.templates.LaunchFlexTemplateResponse: - Response to the request to launch a - job from Flex Template.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/flexTemplates:launch', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_launch_flex_template(request, metadata) - pb_request = templates.LaunchFlexTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = templates.LaunchFlexTemplateResponse() - pb_resp = templates.LaunchFlexTemplateResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_launch_flex_template(resp) - return resp - - @property - def launch_flex_template(self) -> Callable[ - [templates.LaunchFlexTemplateRequest], - templates.LaunchFlexTemplateResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._LaunchFlexTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'FlexTemplatesServiceRestTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py deleted file mode 100644 index 3dac587..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import JobsV1Beta3Client -from .async_client import JobsV1Beta3AsyncClient - -__all__ = ( - 'JobsV1Beta3Client', - 'JobsV1Beta3AsyncClient', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py deleted file mode 100644 index ff51a05..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/async_client.py +++ /dev/null @@ -1,825 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport -from .client import JobsV1Beta3Client - - -class JobsV1Beta3AsyncClient: - """Provides a method to create and modify Google Cloud Dataflow - jobs. A Job is a multi-stage computation graph run by the Cloud - Dataflow service. 
- """ - - _client: JobsV1Beta3Client - - DEFAULT_ENDPOINT = JobsV1Beta3Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = JobsV1Beta3Client.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(JobsV1Beta3Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(JobsV1Beta3Client.parse_common_billing_account_path) - common_folder_path = staticmethod(JobsV1Beta3Client.common_folder_path) - parse_common_folder_path = staticmethod(JobsV1Beta3Client.parse_common_folder_path) - common_organization_path = staticmethod(JobsV1Beta3Client.common_organization_path) - parse_common_organization_path = staticmethod(JobsV1Beta3Client.parse_common_organization_path) - common_project_path = staticmethod(JobsV1Beta3Client.common_project_path) - parse_common_project_path = staticmethod(JobsV1Beta3Client.parse_common_project_path) - common_location_path = staticmethod(JobsV1Beta3Client.common_location_path) - parse_common_location_path = staticmethod(JobsV1Beta3Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobsV1Beta3AsyncClient: The constructed client. - """ - return JobsV1Beta3Client.from_service_account_info.__func__(JobsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobsV1Beta3AsyncClient: The constructed client. - """ - return JobsV1Beta3Client.from_service_account_file.__func__(JobsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return JobsV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> JobsV1Beta3Transport: - """Returns the transport used by the client instance. - - Returns: - JobsV1Beta3Transport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(JobsV1Beta3Client).get_transport_class, type(JobsV1Beta3Client)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, JobsV1Beta3Transport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the jobs v1 beta3 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.JobsV1Beta3Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = JobsV1Beta3Client( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_job(self, - request: Optional[Union[jobs.CreateJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Creates a Cloud Dataflow job. - - To create a job, we recommend using - ``projects.locations.jobs.create`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.create`` is not recommended, as your job - will always start in ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_create_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobRequest( - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.CreateJobRequest, dict]]): - The request object. Request to create a Cloud Dataflow - job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - request = jobs.CreateJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[jobs.GetJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Gets the state of the specified Cloud Dataflow job. - - To get the state of a job, we recommend using - ``projects.locations.jobs.get`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.get`` is not recommended, as you can only - get the state of jobs that are running in ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_get_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobRequest( - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetJobRequest, dict]]): - The request object. Request to get the state of a Cloud - Dataflow job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - request = jobs.GetJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_job(self, - request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Updates the state of an existing Cloud Dataflow job. - - To update the state of an existing job, we recommend using - ``projects.locations.jobs.update`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.update`` is not recommended, as you can - only update the state of jobs that are running in - ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_update_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.UpdateJobRequest( - ) - - # Make the request - response = await client.update_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.UpdateJobRequest, dict]]): - The request object. Request to update a Cloud Dataflow - job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - request = jobs.UpdateJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_job, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_jobs(self, - request: Optional[Union[jobs.ListJobsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""List the jobs of a project. - - To list the jobs of a project in a region, we recommend using - ``projects.locations.jobs.list`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - To list all jobs across all regions, use - ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is - not recommended, as you can only get the list of jobs that are - running in ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]]): - The request object. Request to list Cloud Dataflow jobs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager: - Response to a request to list Cloud - Dataflow jobs in a project. This might - be a partial response, depending on the - page size in the ListJobsRequest. - However, if the project does not have - any jobs, an instance of - ListJobsResponse is not returned and the - request's response body is empty {}. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - request = jobs.ListJobsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request.
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def aggregated_list_jobs(self, - request: Optional[Union[jobs.ListJobsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.AggregatedListJobsAsyncPager: - r"""List the jobs of a project across all regions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_aggregated_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.aggregated_list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]]): - The request object. Request to list Cloud Dataflow jobs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager: - Response to a request to list Cloud - Dataflow jobs in a project. This might - be a partial response, depending on the - page size in the ListJobsRequest. - However, if the project does not have - any jobs, an instance of - ListJobsResponse is not returned and the - request's response body is empty {}. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - request = jobs.ListJobsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.aggregated_list_jobs, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.AggregatedListJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response.
- return response - - async def check_active_jobs(self, - request: Optional[Union[jobs.CheckActiveJobsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.CheckActiveJobsResponse: - r"""Check for existence of active jobs in the given - project across all regions. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_check_active_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CheckActiveJobsRequest( - ) - - # Make the request - response = await client.check_active_jobs(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest, dict]]): - The request object. Request to check if active jobs - exist for a project. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse: - Response for CheckActiveJobsRequest. - """ - # Create or coerce a protobuf request object. - request = jobs.CheckActiveJobsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.check_active_jobs, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def snapshot_job(self, - request: Optional[Union[jobs.SnapshotJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.Snapshot: - r"""Snapshot the state of a streaming job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_snapshot_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.SnapshotJobRequest( - ) - - # Make the request - response = await client.snapshot_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.SnapshotJobRequest, dict]]): - The request object.
Request to create a snapshot of a - job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Snapshot: - Represents a snapshot of a job. - """ - # Create or coerce a protobuf request object. - request = jobs.SnapshotJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.snapshot_job, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "JobsV1Beta3AsyncClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py deleted file mode 100644 index 37d535d..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/client.py +++ /dev/null @@ -1,1027 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
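[Editor's note: the async client deleted above supports use as an async context manager via the `__aenter__`/`__aexit__` pair shown in the diff, which closes the underlying transport on exit. A minimal usage sketch, mirroring the generated samples; the project, location, and job values are hypothetical.]

.. code-block:: python

    # Sketch: fetch one job's state with the async client.
    import asyncio

    from google.cloud import dataflow_v1beta3

    async def main():
        # "async with" closes the underlying channel when the block exits.
        async with dataflow_v1beta3.JobsV1Beta3AsyncClient() as client:
            request = dataflow_v1beta3.GetJobRequest(
                project_id="my-project",   # hypothetical
                location="us-central1",    # hypothetical
                job_id="my-job-id",        # hypothetical
            )
            job = await client.get_job(request=request)
            print(job)

    asyncio.run(main())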
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import JobsV1Beta3GrpcTransport -from .transports.grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport -from .transports.rest import JobsV1Beta3RestTransport - - -class JobsV1Beta3ClientMeta(type): - """Metaclass for the JobsV1Beta3 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] - _transport_registry["grpc"] = JobsV1Beta3GrpcTransport - _transport_registry["grpc_asyncio"] = JobsV1Beta3GrpcAsyncIOTransport - _transport_registry["rest"] = JobsV1Beta3RestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[JobsV1Beta3Transport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class JobsV1Beta3Client(metaclass=JobsV1Beta3ClientMeta): - """Provides a method to create and modify Google Cloud Dataflow - jobs. A Job is a multi-stage computation graph run by the Cloud - Dataflow service. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataflow.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobsV1Beta3Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - JobsV1Beta3Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> JobsV1Beta3Transport: - """Returns the transport used by the client instance. - - Returns: - JobsV1Beta3Transport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, JobsV1Beta3Transport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the jobs v1 beta3 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, JobsV1Beta3Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, JobsV1Beta3Transport): - # transport is a JobsV1Beta3Transport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_job(self, - request: Optional[Union[jobs.CreateJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Creates a Cloud Dataflow job. - - To create a job, we recommend using - ``projects.locations.jobs.create`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.create`` is not recommended, as your job - will always start in ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_create_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobRequest( - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.CreateJobRequest, dict]): - The request object. Request to create a Cloud Dataflow - job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a jobs.CreateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.CreateJobRequest): - request = jobs.CreateJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: Optional[Union[jobs.GetJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Gets the state of the specified Cloud Dataflow job. - - To get the state of a job, we recommend using - ``projects.locations.jobs.get`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.get`` is not recommended, as you can only - get the state of jobs that are running in ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_get_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobRequest( - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.GetJobRequest, dict]): - The request object. Request to get the state of a Cloud - Dataflow job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a jobs.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, jobs.GetJobRequest): - request = jobs.GetJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_job(self, - request: Optional[Union[jobs.UpdateJobRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Updates the state of an existing Cloud Dataflow job. - - To update the state of an existing job, we recommend using - ``projects.locations.jobs.update`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.update`` is not recommended, as you can - only update the state of jobs that are running in - ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_update_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.UpdateJobRequest( - ) - - # Make the request - response = client.update_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.UpdateJobRequest, dict]): - The request object. Request to update a Cloud Dataflow - job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a jobs.UpdateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.UpdateJobRequest): - request = jobs.UpdateJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: Optional[Union[jobs.ListJobsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsPager: - r"""List the jobs of a project. - - To list the jobs of a project in a region, we recommend using - ``projects.locations.jobs.list`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - To list all jobs across all regions, use - ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is - not recommended, as you can only get the list of jobs that are - running in ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]): - The request object. Request to list Cloud Dataflow jobs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager: - Response to a request to list Cloud - Dataflow jobs in a project. This might - be a partial response, depending on the - page size in the ListJobsRequest. - However, if the project does not have - any jobs, an instance of - ListJobsResponse is not returned and the - request's response body is empty {}. - - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a jobs.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.ListJobsRequest): - request = jobs.ListJobsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method.
-        response = pagers.ListJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def aggregated_list_jobs(self,
-            request: Optional[Union[jobs.ListJobsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.AggregatedListJobsPager:
-        r"""List the jobs of a project across all regions.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataflow_v1beta3
-
-            def sample_aggregated_list_jobs():
-                # Create a client
-                client = dataflow_v1beta3.JobsV1Beta3Client()
-
-                # Initialize request argument(s)
-                request = dataflow_v1beta3.ListJobsRequest(
-                )
-
-                # Make the request
-                page_result = client.aggregated_list_jobs(request=request)
-
-                # Handle the response
-                for response in page_result:
-                    print(response)
-
-        Args:
-            request (Union[google.cloud.dataflow_v1beta3.types.ListJobsRequest, dict]):
-                The request object. Request to list Cloud Dataflow jobs.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager:
-                Response to a request to list Cloud
-                Dataflow jobs in a project. This might
-                be a partial response, depending on the
-                page size in the ListJobsRequest.
-                However, if the project does not have
-                any jobs, an instance of
-                ListJobsResponse is not returned and the
-                request's response body is empty {}.
-
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Minor optimization to avoid making a copy if the user passes
-        # in a jobs.ListJobsRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, jobs.ListJobsRequest):
-            request = jobs.ListJobsRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.aggregated_list_jobs]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.AggregatedListJobsPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def check_active_jobs(self,
-            request: Optional[Union[jobs.CheckActiveJobsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> jobs.CheckActiveJobsResponse:
-        r"""Check for existence of active jobs in the given
-        project across all regions.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataflow_v1beta3
-
-            def sample_check_active_jobs():
-                # Create a client
-                client = dataflow_v1beta3.JobsV1Beta3Client()
-
-                # Initialize request argument(s)
-                request = dataflow_v1beta3.CheckActiveJobsRequest(
-                )
-
-                # Make the request
-                response = client.check_active_jobs(request=request)
-
-                # Handle the response
-                print(response)
-
-        Args:
-            request (Union[google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest, dict]):
-                The request object. Request to check whether active jobs
-                exist for a project.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse:
-                Response for CheckActiveJobsRequest.
-        """
-        # Create or coerce a protobuf request object.
-        # Minor optimization to avoid making a copy if the user passes
-        # in a jobs.CheckActiveJobsRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, jobs.CheckActiveJobsRequest):
-            request = jobs.CheckActiveJobsRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.check_active_jobs]
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def snapshot_job(self,
-            request: Optional[Union[jobs.SnapshotJobRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> snapshots.Snapshot:
-        r"""Snapshot the state of a streaming job.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_snapshot_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.SnapshotJobRequest( - ) - - # Make the request - response = client.snapshot_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.SnapshotJobRequest, dict]): - The request object. Request to create a snapshot of a - job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Snapshot: - Represents a snapshot of a job. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a jobs.SnapshotJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, jobs.SnapshotJobRequest): - request = jobs.SnapshotJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.snapshot_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "JobsV1Beta3Client", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py deleted file mode 100644 index 2f8687c..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/pagers.py +++ /dev/null @@ -1,260 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
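-#
-# Illustrative sketch of how these pagers are consumed (``project_id`` and
-# ``location`` values below are assumed examples, not defaults): the pagers
-# defined in this module are returned by ``JobsV1Beta3Client.list_jobs`` and
-# ``aggregated_list_jobs``, and iterating one fetches further pages on
-# demand via ``next_page_token``:
-#
-#     from google.cloud import dataflow_v1beta3
-#
-#     client = dataflow_v1beta3.JobsV1Beta3Client()
-#     request = dataflow_v1beta3.ListJobsRequest(
-#         project_id="my-project",
-#         location="us-central1",
-#     )
-#     for job in client.list_jobs(request=request):
-#         print(job.id, job.current_state)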
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataflow_v1beta3.types import jobs - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., jobs.ListJobsResponse], - request: jobs.ListJobsRequest, - response: jobs.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = jobs.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[jobs.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[jobs.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[jobs.ListJobsResponse]], - request: jobs.ListJobsRequest, - response: jobs.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = jobs.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[jobs.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[jobs.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AggregatedListJobsPager: - """A pager for iterating through ``aggregated_list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``AggregatedListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., jobs.ListJobsResponse], - request: jobs.ListJobsRequest, - response: jobs.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = jobs.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[jobs.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[jobs.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class AggregatedListJobsAsyncPager: - """A pager for iterating through ``aggregated_list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``AggregatedListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[jobs.ListJobsResponse]], - request: jobs.ListJobsRequest, - response: jobs.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.ListJobsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = jobs.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[jobs.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[jobs.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py deleted file mode 100644 index 8dcbf32..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import JobsV1Beta3Transport -from .grpc import JobsV1Beta3GrpcTransport -from .grpc_asyncio import JobsV1Beta3GrpcAsyncIOTransport -from .rest import JobsV1Beta3RestTransport -from .rest import JobsV1Beta3RestInterceptor - - -# Compile a registry of transports. 
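-# The registry keys are the values accepted by the client's ``transport``
-# argument (for example ``JobsV1Beta3Client(transport="rest")``); the
-# values are the corresponding transport classes.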
-_transport_registry = OrderedDict() # type: Dict[str, Type[JobsV1Beta3Transport]] -_transport_registry['grpc'] = JobsV1Beta3GrpcTransport -_transport_registry['grpc_asyncio'] = JobsV1Beta3GrpcAsyncIOTransport -_transport_registry['rest'] = JobsV1Beta3RestTransport - -__all__ = ( - 'JobsV1Beta3Transport', - 'JobsV1Beta3GrpcTransport', - 'JobsV1Beta3GrpcAsyncIOTransport', - 'JobsV1Beta3RestTransport', - 'JobsV1Beta3RestInterceptor', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py deleted file mode 100644 index b581aa4..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/base.py +++ /dev/null @@ -1,236 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class JobsV1Beta3Transport(abc.ABC): - """Abstract transport class for JobsV1Beta3.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', - ) - - DEFAULT_HOST: str = 'dataflow.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
-            scopes (Optional[Sequence[str]]): A list of scopes.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-        """
-
-        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
-
-        # Save the scopes.
-        self._scopes = scopes
-
-        # If no credentials are provided, then determine the appropriate
-        # defaults.
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.create_job: gapic_v1.method.wrap_method(
-                self.create_job,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.get_job: gapic_v1.method.wrap_method(
-                self.get_job,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.update_job: gapic_v1.method.wrap_method(
-                self.update_job,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.list_jobs: gapic_v1.method.wrap_method(
-                self.list_jobs,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.aggregated_list_jobs: gapic_v1.method.wrap_method(
-                self.aggregated_list_jobs,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.check_active_jobs: gapic_v1.method.wrap_method(
-                self.check_active_jobs,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-            self.snapshot_job: gapic_v1.method.wrap_method(
-                self.snapshot_job,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-             Only call this method if the transport is NOT shared
-             with other clients - this may cause errors in other clients!
- """ - raise NotImplementedError() - - @property - def create_job(self) -> Callable[ - [jobs.CreateJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - Union[ - jobs.ListJobsResponse, - Awaitable[jobs.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def aggregated_list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - Union[ - jobs.ListJobsResponse, - Awaitable[jobs.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def check_active_jobs(self) -> Callable[ - [jobs.CheckActiveJobsRequest], - Union[ - jobs.CheckActiveJobsResponse, - Awaitable[jobs.CheckActiveJobsResponse] - ]]: - raise NotImplementedError() - - @property - def snapshot_job(self) -> Callable[ - [jobs.SnapshotJobRequest], - Union[ - snapshots.Snapshot, - Awaitable[snapshots.Snapshot] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'JobsV1Beta3Transport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py deleted file mode 100644 index 9949ea8..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc.py +++ /dev/null @@ -1,451 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots -from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO - - -class JobsV1Beta3GrpcTransport(JobsV1Beta3Transport): - """gRPC backend transport for JobsV1Beta3. - - Provides a method to create and modify Google Cloud Dataflow - jobs. A Job is a multi-stage computation graph run by the Cloud - Dataflow service. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'dataflow.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-              creation failed for any reason.
-          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-              and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
-                credentials_file=None,
-                scopes=self._scopes,
-                ssl_credentials=self._ssl_channel_credentials,
-                quota_project_id=quota_project_id,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-
-        # Wrap messages. This must be done after self._grpc_channel exists
-        self._prep_wrapped_messages(client_info)
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataflow.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> grpc.Channel:
-        """Create and return a gRPC channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
- """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [jobs.CreateJobRequest], - jobs.Job]: - r"""Return a callable for the create job method over gRPC. - - Creates a Cloud Dataflow job. - - To create a job, we recommend using - ``projects.locations.jobs.create`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.create`` is not recommended, as your job - will always start in ``us-central1``. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/CreateJob', - request_serializer=jobs.CreateJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - jobs.Job]: - r"""Return a callable for the get job method over gRPC. - - Gets the state of the specified Cloud Dataflow job. - - To get the state of a job, we recommend using - ``projects.locations.jobs.get`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.get`` is not recommended, as you can only - get the state of jobs that are running in ``us-central1``. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/GetJob', - request_serializer=jobs.GetJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - jobs.Job]: - r"""Return a callable for the update job method over gRPC. - - Updates the state of an existing Cloud Dataflow job. - - To update the state of an existing job, we recommend using - ``projects.locations.jobs.update`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.update`` is not recommended, as you can - only update the state of jobs that are running in - ``us-central1``. - - Returns: - Callable[[~.UpdateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
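-        # The constructed stub is cached in ``self._stubs`` so that
-        # repeated property access reuses one channel-bound callable.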
-        if 'update_job' not in self._stubs:
-            self._stubs['update_job'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob',
-                request_serializer=jobs.UpdateJobRequest.serialize,
-                response_deserializer=jobs.Job.deserialize,
-            )
-        return self._stubs['update_job']
-
-    @property
-    def list_jobs(self) -> Callable[
-            [jobs.ListJobsRequest],
-            jobs.ListJobsResponse]:
-        r"""Return a callable for the list jobs method over gRPC.
-
-        List the jobs of a project.
-
-        To list the jobs of a project in a region, we recommend using
-        ``projects.locations.jobs.list`` with a [regional endpoint]
-        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
-        To list all jobs across all regions, use
-        ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
-        not recommended, as you can only get the list of jobs that are
-        running in ``us-central1``.
-
-        Returns:
-            Callable[[~.ListJobsRequest],
-                    ~.ListJobsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_jobs' not in self._stubs:
-            self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.JobsV1Beta3/ListJobs',
-                request_serializer=jobs.ListJobsRequest.serialize,
-                response_deserializer=jobs.ListJobsResponse.deserialize,
-            )
-        return self._stubs['list_jobs']
-
-    @property
-    def aggregated_list_jobs(self) -> Callable[
-            [jobs.ListJobsRequest],
-            jobs.ListJobsResponse]:
-        r"""Return a callable for the aggregated list jobs method over gRPC.
-
-        List the jobs of a project across all regions.
-
-        Returns:
-            Callable[[~.ListJobsRequest],
-                    ~.ListJobsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'aggregated_list_jobs' not in self._stubs:
-            self._stubs['aggregated_list_jobs'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs',
-                request_serializer=jobs.ListJobsRequest.serialize,
-                response_deserializer=jobs.ListJobsResponse.deserialize,
-            )
-        return self._stubs['aggregated_list_jobs']
-
-    @property
-    def check_active_jobs(self) -> Callable[
-            [jobs.CheckActiveJobsRequest],
-            jobs.CheckActiveJobsResponse]:
-        r"""Return a callable for the check active jobs method over gRPC.
-
-        Check for existence of active jobs in the given
-        project across all regions.
-
-        Returns:
-            Callable[[~.CheckActiveJobsRequest],
-                    ~.CheckActiveJobsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'check_active_jobs' not in self._stubs: - self._stubs['check_active_jobs'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs', - request_serializer=jobs.CheckActiveJobsRequest.serialize, - response_deserializer=jobs.CheckActiveJobsResponse.deserialize, - ) - return self._stubs['check_active_jobs'] - - @property - def snapshot_job(self) -> Callable[ - [jobs.SnapshotJobRequest], - snapshots.Snapshot]: - r"""Return a callable for the snapshot job method over gRPC. - - Snapshot the state of a streaming job. - - Returns: - Callable[[~.SnapshotJobRequest], - ~.Snapshot]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'snapshot_job' not in self._stubs: - self._stubs['snapshot_job'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob', - request_serializer=jobs.SnapshotJobRequest.serialize, - response_deserializer=snapshots.Snapshot.deserialize, - ) - return self._stubs['snapshot_job'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'JobsV1Beta3GrpcTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py deleted file mode 100644 index 5ac3c50..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py +++ /dev/null @@ -1,450 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots -from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .grpc import JobsV1Beta3GrpcTransport - - -class JobsV1Beta3GrpcAsyncIOTransport(JobsV1Beta3Transport): - """gRPC AsyncIO backend transport for JobsV1Beta3. - - Provides a method to create and modify Google Cloud Dataflow - jobs. A Job is a multi-stage computation graph run by the Cloud - Dataflow service. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataflow.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'dataflow.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-        self._grpc_channel = None
-        self._ssl_channel_credentials = ssl_channel_credentials
-        self._stubs: Dict[str, Callable] = {}
-
-        if api_mtls_endpoint:
-            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
-        if client_cert_source:
-            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
-
-        if channel:
-            # Ignore credentials if a channel was passed.
-            credentials = False
-            # If a channel was explicitly provided, set it.
-            self._grpc_channel = channel
-            self._ssl_channel_credentials = None
-        else:
-            if api_mtls_endpoint:
-                host = api_mtls_endpoint
-
-                # Create SSL credentials with client_cert_source or application
-                # default SSL credentials.
-                if client_cert_source:
-                    cert, key = client_cert_source()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-                else:
-                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
-
-            else:
-                if client_cert_source_for_mtls and not ssl_channel_credentials:
-                    cert, key = client_cert_source_for_mtls()
-                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
-                        certificate_chain=cert, private_key=key
-                    )
-
-        # The base transport sets the host, credentials and scopes
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            scopes=scopes,
-            quota_project_id=quota_project_id,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience,
-        )
-
-        if not self._grpc_channel:
-            self._grpc_channel = type(self).create_channel(
-                self._host,
-                # use the credentials which are saved
-                credentials=self._credentials,
-                # Set ``credentials_file`` to ``None`` here as
-                # the credentials that we saved earlier should be used.
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [jobs.CreateJobRequest], - Awaitable[jobs.Job]]: - r"""Return a callable for the create job method over gRPC. - - Creates a Cloud Dataflow job. - - To create a job, we recommend using - ``projects.locations.jobs.create`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.create`` is not recommended, as your job - will always start in ``us-central1``. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/CreateJob', - request_serializer=jobs.CreateJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - Awaitable[jobs.Job]]: - r"""Return a callable for the get job method over gRPC. - - Gets the state of the specified Cloud Dataflow job. - - To get the state of a job, we recommend using - ``projects.locations.jobs.get`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.get`` is not recommended, as you can only - get the state of jobs that are running in ``us-central1``. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/GetJob', - request_serializer=jobs.GetJobRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - Awaitable[jobs.Job]]: - r"""Return a callable for the update job method over gRPC. - - Updates the state of an existing Cloud Dataflow job. - - To update the state of an existing job, we recommend using - ``projects.locations.jobs.update`` with a [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.update`` is not recommended, as you can - only update the state of jobs that are running in - ``us-central1``. 
-
-        Returns:
-            Callable[[~.UpdateJobRequest],
-                    Awaitable[~.Job]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'update_job' not in self._stubs:
-            self._stubs['update_job'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob',
-                request_serializer=jobs.UpdateJobRequest.serialize,
-                response_deserializer=jobs.Job.deserialize,
-            )
-        return self._stubs['update_job']
-
-    @property
-    def list_jobs(self) -> Callable[
-            [jobs.ListJobsRequest],
-            Awaitable[jobs.ListJobsResponse]]:
-        r"""Return a callable for the list jobs method over gRPC.
-
-        List the jobs of a project.
-
-        To list the jobs of a project in a region, we recommend using
-        ``projects.locations.jobs.list`` with a [regional endpoint]
-        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
-        To list all jobs across all regions, use
-        ``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
-        not recommended, as you can only get the list of jobs that are
-        running in ``us-central1``.
-
-        Returns:
-            Callable[[~.ListJobsRequest],
-                    Awaitable[~.ListJobsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_jobs' not in self._stubs:
-            self._stubs['list_jobs'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.JobsV1Beta3/ListJobs',
-                request_serializer=jobs.ListJobsRequest.serialize,
-                response_deserializer=jobs.ListJobsResponse.deserialize,
-            )
-        return self._stubs['list_jobs']
-
-    @property
-    def aggregated_list_jobs(self) -> Callable[
-            [jobs.ListJobsRequest],
-            Awaitable[jobs.ListJobsResponse]]:
-        r"""Return a callable for the aggregated list jobs method over gRPC.
-
-        List the jobs of a project across all regions.
-
-        Returns:
-            Callable[[~.ListJobsRequest],
-                    Awaitable[~.ListJobsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'aggregated_list_jobs' not in self._stubs:
-            self._stubs['aggregated_list_jobs'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs',
-                request_serializer=jobs.ListJobsRequest.serialize,
-                response_deserializer=jobs.ListJobsResponse.deserialize,
-            )
-        return self._stubs['aggregated_list_jobs']
-
-    @property
-    def check_active_jobs(self) -> Callable[
-            [jobs.CheckActiveJobsRequest],
-            Awaitable[jobs.CheckActiveJobsResponse]]:
-        r"""Return a callable for the check active jobs method over gRPC.
-
-        Check for existence of active jobs in the given
-        project across all regions.
-
-        Returns:
-            Callable[[~.CheckActiveJobsRequest],
-                    Awaitable[~.CheckActiveJobsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
- if 'check_active_jobs' not in self._stubs: - self._stubs['check_active_jobs'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs', - request_serializer=jobs.CheckActiveJobsRequest.serialize, - response_deserializer=jobs.CheckActiveJobsResponse.deserialize, - ) - return self._stubs['check_active_jobs'] - - @property - def snapshot_job(self) -> Callable[ - [jobs.SnapshotJobRequest], - Awaitable[snapshots.Snapshot]]: - r"""Return a callable for the snapshot job method over gRPC. - - Snapshot the state of a streaming job. - - Returns: - Callable[[~.SnapshotJobRequest], - Awaitable[~.Snapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'snapshot_job' not in self._stubs: - self._stubs['snapshot_job'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob', - request_serializer=jobs.SnapshotJobRequest.serialize, - response_deserializer=snapshots.Snapshot.deserialize, - ) - return self._stubs['snapshot_job'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'JobsV1Beta3GrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py deleted file mode 100644 index 75f3281..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/rest.py +++ /dev/null @@ -1,902 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots - -from .base import JobsV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class JobsV1Beta3RestInterceptor: - """Interceptor for JobsV1Beta3. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the JobsV1Beta3RestTransport. - - .. 
code-block:: python
-        class MyCustomJobsV1Beta3Interceptor(JobsV1Beta3RestInterceptor):
-            def pre_aggregated_list_jobs(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_aggregated_list_jobs(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_check_active_jobs(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_check_active_jobs(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_create_job(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_create_job(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_get_job(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_get_job(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_list_jobs(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_jobs(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_snapshot_job(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_snapshot_job(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-            def pre_update_job(self, request, metadata):
-                logging.info(f"Received request: {request}")
-                return request, metadata
-
-            def post_update_job(self, response):
-                logging.info(f"Received response: {response}")
-                return response
-
-        transport = JobsV1Beta3RestTransport(interceptor=MyCustomJobsV1Beta3Interceptor())
-        client = JobsV1Beta3Client(transport=transport)
-
-
-    """
-    def pre_aggregated_list_jobs(self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for aggregated_list_jobs
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the JobsV1Beta3 server.
-        """
-        return request, metadata
-
-    def post_aggregated_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse:
-        """Post-rpc interceptor for aggregated_list_jobs
-
-        Override in a subclass to manipulate the response
-        after it is returned by the JobsV1Beta3 server but before
-        it is returned to user code.
-        """
-        return response
-    def pre_create_job(self, request: jobs.CreateJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.CreateJobRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for create_job
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the JobsV1Beta3 server.
-        """
-        return request, metadata
-
-    def post_create_job(self, response: jobs.Job) -> jobs.Job:
-        """Post-rpc interceptor for create_job
-
-        Override in a subclass to manipulate the response
-        after it is returned by the JobsV1Beta3 server but before
-        it is returned to user code.
-        """
-        return response
-    def pre_get_job(self, request: jobs.GetJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.GetJobRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for get_job
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the JobsV1Beta3 server.
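A hedged sketch of what a concrete interceptor subclass might look like in practice, assuming Application Default Credentials are available; the x-example-trace header name is hypothetical:

# Illustrative interceptor: append one metadata pair before create_job is
# sent, and log the created job's name afterwards.
import logging

from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3Client
from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.rest import (
    JobsV1Beta3RestInterceptor,
    JobsV1Beta3RestTransport,
)


class TracingInterceptor(JobsV1Beta3RestInterceptor):
    def pre_create_job(self, request, metadata):
        # "x-example-trace" is a made-up header used only for illustration.
        metadata = tuple(metadata) + (("x-example-trace", "demo"),)
        return request, metadata

    def post_create_job(self, response):
        logging.info("created job: %s", response.name)
        return response


transport = JobsV1Beta3RestTransport(interceptor=TracingInterceptor())
client = JobsV1Beta3Client(transport=transport)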
- """ - return request, metadata - - def post_get_job(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for get_job - - Override in a subclass to manipulate the response - after it is returned by the JobsV1Beta3 server but before - it is returned to user code. - """ - return response - def pre_list_jobs(self, request: jobs.ListJobsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.ListJobsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_jobs - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobsV1Beta3 server. - """ - return request, metadata - - def post_list_jobs(self, response: jobs.ListJobsResponse) -> jobs.ListJobsResponse: - """Post-rpc interceptor for list_jobs - - Override in a subclass to manipulate the response - after it is returned by the JobsV1Beta3 server but before - it is returned to user code. - """ - return response - def pre_snapshot_job(self, request: jobs.SnapshotJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.SnapshotJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for snapshot_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobsV1Beta3 server. - """ - return request, metadata - - def post_snapshot_job(self, response: snapshots.Snapshot) -> snapshots.Snapshot: - """Post-rpc interceptor for snapshot_job - - Override in a subclass to manipulate the response - after it is returned by the JobsV1Beta3 server but before - it is returned to user code. - """ - return response - def pre_update_job(self, request: jobs.UpdateJobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[jobs.UpdateJobRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_job - - Override in a subclass to manipulate the request or metadata - before they are sent to the JobsV1Beta3 server. - """ - return request, metadata - - def post_update_job(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for update_job - - Override in a subclass to manipulate the response - after it is returned by the JobsV1Beta3 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class JobsV1Beta3RestStub: - _session: AuthorizedSession - _host: str - _interceptor: JobsV1Beta3RestInterceptor - - -class JobsV1Beta3RestTransport(JobsV1Beta3Transport): - """REST backend transport for JobsV1Beta3. - - Provides a method to create and modify Google Cloud Dataflow - jobs. A Job is a multi-stage computation graph run by the Cloud - Dataflow service. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[JobsV1Beta3RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint. Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or JobsV1Beta3RestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _AggregatedListJobs(JobsV1Beta3RestStub):
-        def __hash__(self):
-            return hash("AggregatedListJobs")
-
-        def __call__(self,
-                request: jobs.ListJobsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> jobs.ListJobsResponse:
-            r"""Call the aggregated list jobs method over HTTP.
-
-            Args:
-                request (~.jobs.ListJobsRequest):
-                    The request object. Request to list Cloud Dataflow jobs.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.jobs.ListJobsResponse:
-                    Response to a request to list Cloud
-                Dataflow jobs in a project. This might
-                be a partial response, depending on the
-                page size in the ListJobsRequest.
-                However, if the project does not have
-                any jobs, an instance of
-                ListJobsResponse is not returned and the
-                request's response body is empty {}.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/jobs:aggregated', - }, - ] - request, metadata = self._interceptor.pre_aggregated_list_jobs(request, metadata) - pb_request = jobs.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.ListJobsResponse() - pb_resp = jobs.ListJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_aggregated_list_jobs(resp) - return resp - - class _CheckActiveJobs(JobsV1Beta3RestStub): - def __hash__(self): - return hash("CheckActiveJobs") - - def __call__(self, - request: jobs.CheckActiveJobsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.CheckActiveJobsResponse: - raise RuntimeError( - "Cannot define a method without a valid 'google.api.http' annotation.") - - class _CreateJob(JobsV1Beta3RestStub): - def __hash__(self): - return hash("CreateJob") - - def __call__(self, - request: jobs.CreateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.Job: - r"""Call the create job method over HTTP. - - Args: - request (~.jobs.CreateJobRequest): - The request object. Request to create a Cloud Dataflow - job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - Defines a job to be run by the Cloud - Dataflow service. 
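The REST stubs above rely on path_template.transcode to split each request into a URI and residual query parameters. A toy illustration of that idea, using only the standard library; this is a simplification for intuition, not the real helper's implementation:

# Toy HTTP transcoding: fields named in the URI template are substituted
# into the path; everything left over becomes query parameters.
import re


def toy_transcode(uri_template, request):
    fields = dict(request)

    def substitute(match):
        # Pull the named field out of the request and inline it in the path.
        return str(fields.pop(match.group(1)))

    uri = re.sub(r"\{(\w+)\}", substitute, uri_template)
    return uri, fields  # (path, query params)


uri, params = toy_transcode(
    "/v1b3/projects/{project_id}/jobs:aggregated",
    {"project_id": "my-project", "page_size": 50},
)
assert uri == "/v1b3/projects/my-project/jobs:aggregated"
assert params == {"page_size": 50}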
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs', - 'body': 'job', - }, -{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/jobs', - 'body': 'job', - }, - ] - request, metadata = self._interceptor.pre_create_job(request, metadata) - pb_request = jobs.CreateJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_job(resp) - return resp - - class _GetJob(JobsV1Beta3RestStub): - def __hash__(self): - return hash("GetJob") - - def __call__(self, - request: jobs.GetJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.Job: - r"""Call the get job method over HTTP. - - Args: - request (~.jobs.GetJobRequest): - The request object. Request to get the state of a Cloud - Dataflow job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - Defines a job to be run by the Cloud - Dataflow service. 
-
-            """
-
-            http_options: List[Dict[str, str]] = [{
-                'method': 'get',
-                'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}',
-            },
-{
-                'method': 'get',
-                'uri': '/v1b3/projects/{project_id}/jobs/{job_id}',
-            },
-            ]
-            request, metadata = self._interceptor.pre_get_job(request, metadata)
-            pb_request = jobs.GetJobRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-
-            # Jsonify the query params
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                including_default_value_fields=False,
-                use_integers_for_enums=True,
-            ))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-
-            # Send the request
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(self._session, method)(
-                "{host}{uri}".format(host=self._host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
-            if response.status_code >= 400:
-                raise core_exceptions.from_http_response(response)
-
-            # Return the response
-            resp = jobs.Job()
-            pb_resp = jobs.Job.pb(resp)
-
-            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
-            resp = self._interceptor.post_get_job(resp)
-            return resp
-
-    class _ListJobs(JobsV1Beta3RestStub):
-        def __hash__(self):
-            return hash("ListJobs")
-
-        def __call__(self,
-                request: jobs.ListJobsRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> jobs.ListJobsResponse:
-            r"""Call the list jobs method over HTTP.
-
-            Args:
-                request (~.jobs.ListJobsRequest):
-                    The request object. Request to list Cloud Dataflow jobs.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.jobs.ListJobsResponse:
-                    Response to a request to list Cloud
-                Dataflow jobs in a project. This might
-                be a partial response, depending on the
-                page size in the ListJobsRequest.
-                However, if the project does not have
-                any jobs, an instance of
-                ListJobsResponse is not returned and the
-                request's response body is empty {}.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/jobs', - }, - ] - request, metadata = self._interceptor.pre_list_jobs(request, metadata) - pb_request = jobs.ListJobsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.ListJobsResponse() - pb_resp = jobs.ListJobsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_jobs(resp) - return resp - - class _SnapshotJob(JobsV1Beta3RestStub): - def __hash__(self): - return hash("SnapshotJob") - - def __call__(self, - request: jobs.SnapshotJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> snapshots.Snapshot: - r"""Call the snapshot job method over HTTP. - - Args: - request (~.jobs.SnapshotJobRequest): - The request object. Request to create a snapshot of a - job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.snapshots.Snapshot: - Represents a snapshot of a job. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}:snapshot', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}:snapshot', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_snapshot_job(request, metadata) - pb_request = jobs.SnapshotJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = snapshots.Snapshot() - pb_resp = snapshots.Snapshot.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_snapshot_job(resp) - return resp - - class _UpdateJob(JobsV1Beta3RestStub): - def __hash__(self): - return hash("UpdateJob") - - def __call__(self, - request: jobs.UpdateJobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> jobs.Job: - r"""Call the update job method over HTTP. - - Args: - request (~.jobs.UpdateJobRequest): - The request object. Request to update a Cloud Dataflow - job. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.jobs.Job: - Defines a job to be run by the Cloud - Dataflow service. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}', - 'body': 'job', - }, -{ - 'method': 'put', - 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}', - 'body': 'job', - }, - ] - request, metadata = self._interceptor.pre_update_job(request, metadata) - pb_request = jobs.UpdateJobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_job(resp) - return resp - - @property - def aggregated_list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - jobs.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._AggregatedListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def check_active_jobs(self) -> Callable[ - [jobs.CheckActiveJobsRequest], - jobs.CheckActiveJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CheckActiveJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_job(self) -> Callable[ - [jobs.CreateJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job(self) -> Callable[ - [jobs.GetJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_jobs(self) -> Callable[ - [jobs.ListJobsRequest], - jobs.ListJobsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListJobs(self._session, self._host, self._interceptor) # type: ignore - - @property - def snapshot_job(self) -> Callable[ - [jobs.SnapshotJobRequest], - snapshots.Snapshot]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._SnapshotJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_job(self) -> Callable[ - [jobs.UpdateJobRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateJob(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'JobsV1Beta3RestTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py deleted file mode 100644 index c3ca155..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import MessagesV1Beta3Client -from .async_client import MessagesV1Beta3AsyncClient - -__all__ = ( - 'MessagesV1Beta3Client', - 'MessagesV1Beta3AsyncClient', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py deleted file mode 100644 index 906666f..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/async_client.py +++ /dev/null @@ -1,304 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.types import messages -from .transports.base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport -from .client import MessagesV1Beta3Client - - -class MessagesV1Beta3AsyncClient: - """The Dataflow Messages API is used for monitoring the progress - of Dataflow jobs. - """ - - _client: MessagesV1Beta3Client - - DEFAULT_ENDPOINT = MessagesV1Beta3Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MessagesV1Beta3Client.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(MessagesV1Beta3Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MessagesV1Beta3Client.parse_common_billing_account_path) - common_folder_path = staticmethod(MessagesV1Beta3Client.common_folder_path) - parse_common_folder_path = staticmethod(MessagesV1Beta3Client.parse_common_folder_path) - common_organization_path = staticmethod(MessagesV1Beta3Client.common_organization_path) - parse_common_organization_path = staticmethod(MessagesV1Beta3Client.parse_common_organization_path) - common_project_path = staticmethod(MessagesV1Beta3Client.common_project_path) - parse_common_project_path = staticmethod(MessagesV1Beta3Client.parse_common_project_path) - common_location_path = staticmethod(MessagesV1Beta3Client.common_location_path) - parse_common_location_path = staticmethod(MessagesV1Beta3Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MessagesV1Beta3AsyncClient: The constructed client. - """ - return MessagesV1Beta3Client.from_service_account_info.__func__(MessagesV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MessagesV1Beta3AsyncClient: The constructed client. 
-        """
-        return MessagesV1Beta3Client.from_service_account_file.__func__(MessagesV1Beta3AsyncClient, filename, *args, **kwargs)  # type: ignore
-
-    from_service_account_json = from_service_account_file
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return MessagesV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> MessagesV1Beta3Transport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            MessagesV1Beta3Transport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    get_transport_class = functools.partial(type(MessagesV1Beta3Client).get_transport_class, type(MessagesV1Beta3Client))
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, MessagesV1Beta3Transport] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the messages v1 beta3 client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, ~.MessagesV1Beta3Transport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = MessagesV1Beta3Client(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def list_job_messages(self,
-            request: Optional[Union[messages.ListJobMessagesRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListJobMessagesAsyncPager:
-        r"""Request the job status.
-
-        To request the status of a job, we recommend using
-        ``projects.locations.jobs.messages.list`` with a [regional
-        endpoint]
-        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
-        Using ``projects.jobs.messages.list`` is not recommended, as you
-        can only request the status of jobs that are running in
-        ``us-central1``.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            # client as shown in:
-            # https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataflow_v1beta3
-
-            async def sample_list_job_messages():
-                # Create a client
-                client = dataflow_v1beta3.MessagesV1Beta3AsyncClient()
-
-                # Initialize request argument(s)
-                request = dataflow_v1beta3.ListJobMessagesRequest(
-                )
-
-                # Make the request
-                page_result = client.list_job_messages(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest, dict]]):
-                The request object. Request to list job messages.
-                Up to max_results messages will be returned in the time
-                range specified starting with the oldest messages first.
-                If no time range is specified the results will start
-                with the oldest message.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager:
-                Response to a request to list job
-                messages.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        request = messages.ListJobMessagesRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.list_job_messages,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobMessagesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MessagesV1Beta3AsyncClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py deleted file mode 100644 index 4d4c75a..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/client.py +++ /dev/null @@ -1,500 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.types import messages -from .transports.base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import MessagesV1Beta3GrpcTransport -from .transports.grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport -from .transports.rest import MessagesV1Beta3RestTransport - - -class MessagesV1Beta3ClientMeta(type): - """Metaclass for the MessagesV1Beta3 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[MessagesV1Beta3Transport]]
-    _transport_registry["grpc"] = MessagesV1Beta3GrpcTransport
-    _transport_registry["grpc_asyncio"] = MessagesV1Beta3GrpcAsyncIOTransport
-    _transport_registry["rest"] = MessagesV1Beta3RestTransport
-
-    def get_transport_class(cls,
-            label: Optional[str] = None,
-        ) -> Type[MessagesV1Beta3Transport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class MessagesV1Beta3Client(metaclass=MessagesV1Beta3ClientMeta):
-    """The Dataflow Messages API is used for monitoring the progress
-    of Dataflow jobs.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "dataflow.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            MessagesV1Beta3Client: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            MessagesV1Beta3Client: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> MessagesV1Beta3Transport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            MessagesV1Beta3Transport: The transport used by the client
-                instance.
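A hedged sketch of endpoint selection from the caller's side: an explicit api_endpoint in ClientOptions takes precedence over the mTLS conversion logic above. Assumes Application Default Credentials; the hostname shown is simply the default endpoint:

# Override the endpoint explicitly via ClientOptions.
from google.api_core.client_options import ClientOptions
from google.cloud import dataflow_v1beta3

options = ClientOptions(api_endpoint="dataflow.googleapis.com")
client = dataflow_v1beta3.MessagesV1Beta3AsyncClient(client_options=options)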
-        """
-        return self._transport
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @classmethod
-    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
-        """Return the API endpoint and client cert source for mutual TLS.
-
-        The client cert source is determined in the following order:
-        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
-        client cert source is None.
-        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
-        default client cert source exists, use the default one; otherwise the client cert
-        source is None.
-
-        The API endpoint is determined in the following order:
-        (1) if `client_options.api_endpoint` is provided, use the provided one.
-        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
-        default mTLS endpoint; if the environment variable is "never", use the default API
-        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
-        use the default API endpoint.
-
-        More details can be found at https://google.aip.dev/auth/4114.
-
-        Args:
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. Only the `api_endpoint` and `client_cert_source` properties may be used
-                in this method.
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MessagesV1Beta3Transport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the messages v1 beta3 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, MessagesV1Beta3Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, MessagesV1Beta3Transport): - # transport is a MessagesV1Beta3Transport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def list_job_messages(self, - request: Optional[Union[messages.ListJobMessagesRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobMessagesPager: - r"""Request the job status. - - To request the status of a job, we recommend using - ``projects.locations.jobs.messages.list`` with a [regional - endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.messages.list`` is not recommended, as you - can only request the status of jobs that are running in - ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
-                # - It may require specifying regional endpoints when creating the service
-                #   client as shown in:
-                #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-                from google.cloud import dataflow_v1beta3
-
-                def sample_list_job_messages():
-                    # Create a client
-                    client = dataflow_v1beta3.MessagesV1Beta3Client()
-
-                    # Initialize request argument(s)
-                    request = dataflow_v1beta3.ListJobMessagesRequest(
-                    )
-
-                    # Make the request
-                    page_result = client.list_job_messages(request=request)
-
-                    # Handle the response
-                    for response in page_result:
-                        print(response)
-
-        Args:
-            request (Union[google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest, dict]):
-                The request object. Request to list job messages.
-                Up to max_results messages will be returned in the time
-                range specified starting with the oldest messages first.
-                If no time range is specified, the results will start
-                with the oldest message.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager:
-                Response to a request to list job
-                messages.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Minor optimization to avoid making a copy if the user passes
-        # in a messages.ListJobMessagesRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, messages.ListJobMessagesRequest):
-            request = messages.ListJobMessagesRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_job_messages]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-                ("location", request.location),
-                ("job_id", request.job_id),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListJobMessagesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, type, value, traceback):
-        """Releases underlying transport's resources.
-
-        .. warning::
-            ONLY use as a context manager if the transport is NOT shared
-            with other clients! Exiting the with block will CLOSE the transport
-            and may cause errors in other clients!
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MessagesV1Beta3Client", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py deleted file mode 100644 index 7ccd2c7..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataflow_v1beta3.types import messages - - -class ListJobMessagesPager: - """A pager for iterating through ``list_job_messages`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_messages`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobMessages`` requests and continue to iterate - through the ``job_messages`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., messages.ListJobMessagesResponse], - request: messages.ListJobMessagesRequest, - response: messages.ListJobMessagesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = messages.ListJobMessagesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[messages.ListJobMessagesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[messages.JobMessage]: - for page in self.pages: - yield from page.job_messages - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobMessagesAsyncPager: - """A pager for iterating through ``list_job_messages`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_messages`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobMessages`` requests and continue to iterate - through the ``job_messages`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[messages.ListJobMessagesResponse]], - request: messages.ListJobMessagesRequest, - response: messages.ListJobMessagesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.ListJobMessagesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = messages.ListJobMessagesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[messages.ListJobMessagesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[messages.JobMessage]: - async def async_generator(): - async for page in self.pages: - for response in page.job_messages: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py deleted file mode 100644 index e5a2058..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MessagesV1Beta3Transport -from .grpc import MessagesV1Beta3GrpcTransport -from .grpc_asyncio import MessagesV1Beta3GrpcAsyncIOTransport -from .rest import MessagesV1Beta3RestTransport -from .rest import MessagesV1Beta3RestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MessagesV1Beta3Transport]] -_transport_registry['grpc'] = MessagesV1Beta3GrpcTransport -_transport_registry['grpc_asyncio'] = MessagesV1Beta3GrpcAsyncIOTransport -_transport_registry['rest'] = MessagesV1Beta3RestTransport - -__all__ = ( - 'MessagesV1Beta3Transport', - 'MessagesV1Beta3GrpcTransport', - 'MessagesV1Beta3GrpcAsyncIOTransport', - 'MessagesV1Beta3RestTransport', - 'MessagesV1Beta3RestInterceptor', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py deleted file mode 100644 index d416b54..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/base.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataflow_v1beta3.types import messages - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MessagesV1Beta3Transport(abc.ABC): - """Abstract transport class for MessagesV1Beta3.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', - ) - - DEFAULT_HOST: str = 'dataflow.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
-        if credentials and credentials_file:
-            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
-
-        if credentials_file is not None:
-            credentials, _ = google.auth.load_credentials_from_file(
-                                credentials_file,
-                                **scopes_kwargs,
-                                quota_project_id=quota_project_id
-                            )
-        elif credentials is None:
-            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
-            # Don't apply the audience if a credentials file was passed by the user.
-            if hasattr(credentials, "with_gdch_audience"):
-                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)
-
-        # If the credentials are service account credentials, then always try to use self signed JWT.
-        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
-            credentials = credentials.with_always_use_jwt_access(True)
-
-        # Save the credentials.
-        self._credentials = credentials
-
-        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
-        if ':' not in host:
-            host += ':443'
-        self._host = host
-
-    def _prep_wrapped_messages(self, client_info):
-        # Precompute the wrapped methods.
-        self._wrapped_methods = {
-            self.list_job_messages: gapic_v1.method.wrap_method(
-                self.list_job_messages,
-                default_timeout=None,
-                client_info=client_info,
-            ),
-        }
-
-    def close(self):
-        """Closes resources associated with the transport.
-
-        .. warning::
-             Only call this method if the transport is NOT shared
-             with other clients - this may cause errors in other clients!
-        """
-        raise NotImplementedError()
-
-    @property
-    def list_job_messages(self) -> Callable[
-            [messages.ListJobMessagesRequest],
-            Union[
-                messages.ListJobMessagesResponse,
-                Awaitable[messages.ListJobMessagesResponse]
-            ]]:
-        raise NotImplementedError()
-
-    @property
-    def kind(self) -> str:
-        raise NotImplementedError()
-
-
-__all__ = (
-    'MessagesV1Beta3Transport',
-)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py
deleted file mode 100644
index 6028ead..0000000
--- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc.py
+++ /dev/null
@@ -1,273 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-
-from google.cloud.dataflow_v1beta3.types import messages
-from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO
-
-
-class MessagesV1Beta3GrpcTransport(MessagesV1Beta3Transport):
-    """gRPC backend transport for MessagesV1Beta3.
-
-    The Dataflow Messages API is used for monitoring the progress
-    of Dataflow jobs.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'dataflow.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def list_job_messages(self) -> Callable[
-            [messages.ListJobMessagesRequest],
-            messages.ListJobMessagesResponse]:
-        r"""Return a callable for the list job messages method over gRPC.
-
-        Request the job status.
-
-        To request the status of a job, we recommend using
-        ``projects.locations.jobs.messages.list`` with a [regional
-        endpoint]
-        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
-        Using ``projects.jobs.messages.list`` is not recommended, as you
-        can only request the status of jobs that are running in
-        ``us-central1``.
-
-        Returns:
-            Callable[[~.ListJobMessagesRequest],
-                    ~.ListJobMessagesResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_job_messages' not in self._stubs:
-            self._stubs['list_job_messages'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.MessagesV1Beta3/ListJobMessages',
-                request_serializer=messages.ListJobMessagesRequest.serialize,
-                response_deserializer=messages.ListJobMessagesResponse.deserialize,
-            )
-        return self._stubs['list_job_messages']
-
-    def close(self):
-        self.grpc_channel.close()
-
-    @property
-    def kind(self) -> str:
-        return "grpc"
-
-
-__all__ = (
-    'MessagesV1Beta3GrpcTransport',
-)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py
deleted file mode 100644
index 0778c7b..0000000
--- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/grpc_asyncio.py
+++ /dev/null
@@ -1,272 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import warnings
-from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers_async
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-from grpc.experimental import aio  # type: ignore
-
-from google.cloud.dataflow_v1beta3.types import messages
-from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO
-from .grpc import MessagesV1Beta3GrpcTransport
-
-
-class MessagesV1Beta3GrpcAsyncIOTransport(MessagesV1Beta3Transport):
-    """gRPC AsyncIO backend transport for MessagesV1Beta3.
-
-    The Dataflow Messages API is used for monitoring the progress
-    of Dataflow jobs.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-
-    _grpc_channel: aio.Channel
-    _stubs: Dict[str, Callable] = {}
-
-    @classmethod
-    def create_channel(cls,
-                       host: str = 'dataflow.googleapis.com',
-                       credentials: Optional[ga_credentials.Credentials] = None,
-                       credentials_file: Optional[str] = None,
-                       scopes: Optional[Sequence[str]] = None,
-                       quota_project_id: Optional[str] = None,
-                       **kwargs) -> aio.Channel:
-        """Create and return a gRPC AsyncIO channel object.
-        Args:
-            host (Optional[str]): The host for the channel to use.
-            credentials (Optional[~.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify this application to the service. If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is mutually exclusive with credentials.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. 
- """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def list_job_messages(self) -> Callable[ - [messages.ListJobMessagesRequest], - Awaitable[messages.ListJobMessagesResponse]]: - r"""Return a callable for the list job messages method over gRPC. - - Request the job status. - - To request the status of a job, we recommend using - ``projects.locations.jobs.messages.list`` with a [regional - endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.messages.list`` is not recommended, as you - can only request the status of jobs that are running in - ``us-central1``. - - Returns: - Callable[[~.ListJobMessagesRequest], - Awaitable[~.ListJobMessagesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
-        if 'list_job_messages' not in self._stubs:
-            self._stubs['list_job_messages'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.MessagesV1Beta3/ListJobMessages',
-                request_serializer=messages.ListJobMessagesRequest.serialize,
-                response_deserializer=messages.ListJobMessagesResponse.deserialize,
-            )
-        return self._stubs['list_job_messages']
-
-    def close(self):
-        return self.grpc_channel.close()
-
-
-__all__ = (
-    'MessagesV1Beta3GrpcAsyncIOTransport',
-)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py
deleted file mode 100644
index 29e415e..0000000
--- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/messages_v1_beta3/transports/rest.py
+++ /dev/null
@@ -1,292 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from google.auth.transport.requests import AuthorizedSession  # type: ignore
-import json  # type: ignore
-import grpc  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.api_core import exceptions as core_exceptions
-from google.api_core import retry as retries
-from google.api_core import rest_helpers
-from google.api_core import rest_streaming
-from google.api_core import path_template
-from google.api_core import gapic_v1
-
-from google.protobuf import json_format
-from requests import __version__ as requests_version
-import dataclasses
-import re
-from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
-import warnings
-
-try:
-    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
-except AttributeError:  # pragma: NO COVER
-    OptionalRetry = Union[retries.Retry, object]  # type: ignore
-
-
-from google.cloud.dataflow_v1beta3.types import messages
-
-from .base import MessagesV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO
-
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
-    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
-    grpc_version=None,
-    rest_version=requests_version,
-)
-
-
-class MessagesV1Beta3RestInterceptor:
-    """Interceptor for MessagesV1Beta3.
-
-    Interceptors are used to manipulate requests, request metadata, and responses
-    in arbitrary ways.
-    Example use cases include:
-    * Logging
-    * Verifying requests according to service or custom semantics
-    * Stripping extraneous information from responses
-
-    These use cases and more can be enabled by injecting an
-    instance of a custom subclass when constructing the MessagesV1Beta3RestTransport.
-
-    .. code-block:: python
-        class MyCustomMessagesV1Beta3Interceptor(MessagesV1Beta3RestInterceptor):
-            def pre_list_job_messages(self, request, metadata):
-                logging.log(f"Received request: {request}")
-                return request, metadata
-
-            def post_list_job_messages(self, response):
-                logging.log(f"Received response: {response}")
-                return response
-
-        transport = MessagesV1Beta3RestTransport(interceptor=MyCustomMessagesV1Beta3Interceptor())
-        client = MessagesV1Beta3Client(transport=transport)
-
-
-    """
-    def pre_list_job_messages(self, request: messages.ListJobMessagesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[messages.ListJobMessagesRequest, Sequence[Tuple[str, str]]]:
-        """Pre-rpc interceptor for list_job_messages
-
-        Override in a subclass to manipulate the request or metadata
-        before they are sent to the MessagesV1Beta3 server.
-        """
-        return request, metadata
-
-    def post_list_job_messages(self, response: messages.ListJobMessagesResponse) -> messages.ListJobMessagesResponse:
-        """Post-rpc interceptor for list_job_messages
-
-        Override in a subclass to manipulate the response
-        after it is returned by the MessagesV1Beta3 server but before
-        it is returned to user code.
-        """
-        return response
-
-
-@dataclasses.dataclass
-class MessagesV1Beta3RestStub:
-    _session: AuthorizedSession
-    _host: str
-    _interceptor: MessagesV1Beta3RestInterceptor
-
-
-class MessagesV1Beta3RestTransport(MessagesV1Beta3Transport):
-    """REST backend transport for MessagesV1Beta3.
-
-    The Dataflow Messages API is used for monitoring the progress
-    of Dataflow jobs.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends JSON representations of protocol buffers over HTTP/1.1
-
-    """
-
-    def __init__(self, *,
-            host: str = 'dataflow.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            client_cert_source_for_mtls: Optional[Callable[[
-                ], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            url_scheme: str = 'https',
-            interceptor: Optional[MessagesV1Beta3RestInterceptor] = None,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
-                certificate to configure mutual TLS HTTP channel. It is ignored
-                if ``channel`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint.  Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or MessagesV1Beta3RestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _ListJobMessages(MessagesV1Beta3RestStub):
-        def __hash__(self):
-            return hash("ListJobMessages")
-
-        def __call__(self,
-                request: messages.ListJobMessagesRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> messages.ListJobMessagesResponse:
-            r"""Call the list job messages method over HTTP.
-
-            Args:
-                request (~.messages.ListJobMessagesRequest):
-                    The request object. Request to list job messages. Up to max_results messages
-                    will be returned in the time range specified starting
-                    with the oldest messages first. If no time range is
-                    specified, the results will start with the oldest
-                    message.
-
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.messages.ListJobMessagesResponse:
-                    Response to a request to list job
-                    messages.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/messages', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}/messages', - }, - ] - request, metadata = self._interceptor.pre_list_job_messages(request, metadata) - pb_request = messages.ListJobMessagesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = messages.ListJobMessagesResponse() - pb_resp = messages.ListJobMessagesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_job_messages(resp) - return resp - - @property - def list_job_messages(self) -> Callable[ - [messages.ListJobMessagesRequest], - messages.ListJobMessagesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListJobMessages(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MessagesV1Beta3RestTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py deleted file mode 100644 index 14f89b3..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
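For reference, the interceptor hooks defined in the REST transport removed above are wired in at construction time; a minimal sketch based on the example in the transport's own docstring (the logging calls are illustrative):

    import logging

    from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3Client
    from google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.rest import (
        MessagesV1Beta3RestInterceptor,
        MessagesV1Beta3RestTransport,
    )

    class LoggingInterceptor(MessagesV1Beta3RestInterceptor):
        def pre_list_job_messages(self, request, metadata):
            # Runs before the HTTP request is sent.
            logging.info("ListJobMessages request: %s", request)
            return request, metadata

        def post_list_job_messages(self, response):
            # Runs after the response is parsed, before it reaches user code.
            logging.info("Received %d job messages", len(response.job_messages))
            return response

    transport = MessagesV1Beta3RestTransport(interceptor=LoggingInterceptor())
    client = MessagesV1Beta3Client(transport=transport)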
-# -from .client import MetricsV1Beta3Client -from .async_client import MetricsV1Beta3AsyncClient - -__all__ = ( - 'MetricsV1Beta3Client', - 'MetricsV1Beta3AsyncClient', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py deleted file mode 100644 index 0ebe3ad..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/async_client.py +++ /dev/null @@ -1,496 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.types import metrics -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport -from .client import MetricsV1Beta3Client - - -class MetricsV1Beta3AsyncClient: - """The Dataflow Metrics API lets you monitor the progress of - Dataflow jobs. 
- """ - - _client: MetricsV1Beta3Client - - DEFAULT_ENDPOINT = MetricsV1Beta3Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = MetricsV1Beta3Client.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(MetricsV1Beta3Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(MetricsV1Beta3Client.parse_common_billing_account_path) - common_folder_path = staticmethod(MetricsV1Beta3Client.common_folder_path) - parse_common_folder_path = staticmethod(MetricsV1Beta3Client.parse_common_folder_path) - common_organization_path = staticmethod(MetricsV1Beta3Client.common_organization_path) - parse_common_organization_path = staticmethod(MetricsV1Beta3Client.parse_common_organization_path) - common_project_path = staticmethod(MetricsV1Beta3Client.common_project_path) - parse_common_project_path = staticmethod(MetricsV1Beta3Client.parse_common_project_path) - common_location_path = staticmethod(MetricsV1Beta3Client.common_location_path) - parse_common_location_path = staticmethod(MetricsV1Beta3Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsV1Beta3AsyncClient: The constructed client. - """ - return MetricsV1Beta3Client.from_service_account_info.__func__(MetricsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsV1Beta3AsyncClient: The constructed client. - """ - return MetricsV1Beta3Client.from_service_account_file.__func__(MetricsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
-
-        Returns:
-            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
-                client cert source to use.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
-        """
-        return MetricsV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
-
-    @property
-    def transport(self) -> MetricsV1Beta3Transport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            MetricsV1Beta3Transport: The transport used by the client instance.
-        """
-        return self._client.transport
-
-    get_transport_class = functools.partial(type(MetricsV1Beta3Client).get_transport_class, type(MetricsV1Beta3Client))
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, MetricsV1Beta3Transport] = "grpc_asyncio",
-            client_options: Optional[ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the metrics v1 beta3 client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, ~.MetricsV1Beta3Transport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        self._client = MetricsV1Beta3Client(
-            credentials=credentials,
-            transport=transport,
-            client_options=client_options,
-            client_info=client_info,
-
-        )
-
-    async def get_job_metrics(self,
-            request: Optional[Union[metrics.GetJobMetricsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> metrics.JobMetrics:
-        r"""Request the job status.
-
-        To request the status of a job, we recommend using
-        ``projects.locations.jobs.getMetrics`` with a [regional
-        endpoint]
-        (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
-        Using ``projects.jobs.getMetrics`` is not recommended, as you
-        can only request the status of jobs that are running in
-        ``us-central1``.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_get_job_metrics(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobMetricsRequest( - ) - - # Make the request - response = await client.get_job_metrics(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest, dict]]): - The request object. Request to get job metrics. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.JobMetrics: - JobMetrics contains a collection of - metrics describing the detailed progress - of a Dataflow job. Metrics correspond to - user-defined and system-defined metrics - in the job. - - This resource captures only the most - recent values of each metric; - time-series data can be queried for them - (under the same metric names) from Cloud - Monitoring. - - """ - # Create or coerce a protobuf request object. - request = metrics.GetJobMetricsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_metrics, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_execution_details(self, - request: Optional[Union[metrics.GetJobExecutionDetailsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.GetJobExecutionDetailsAsyncPager: - r"""Request detailed information about the execution - status of the job. - EXPERIMENTAL. This API is subject to change or removal - without notice. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
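The routing-header block in ``get_job_metrics`` above is how the client tells the Dataflow frontend which project, location, and job a call targets. A hedged sketch of what the helper emits (the exact header value is an assumption based on typical GAPIC behavior):

.. code-block:: python

    from google.api_core import gapic_v1

    # Illustrative field values; real calls take them from the request.
    header = gapic_v1.routing_header.to_grpc_metadata((
        ("project_id", "my-project"),
        ("location", "us-central1"),
        ("job_id", "2022-12-13_00_00_00-1234"),
    ))
    # Expected shape: ("x-goog-request-params", "project_id=my-project&...")
    print(header)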
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataflow_v1beta3
-
-            async def sample_get_job_execution_details():
-                # Create a client
-                client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
-
-                # Initialize request argument(s)
-                request = dataflow_v1beta3.GetJobExecutionDetailsRequest(
-                )
-
-                # Make the request
-                page_result = await client.get_job_execution_details(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest, dict]]):
-                The request object. Request to get job execution
-                details.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager:
-                Information about the execution of a
-                job.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        request = metrics.GetJobExecutionDetailsRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_job_execution_details,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-                ("location", request.location),
-                ("job_id", request.job_id),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.GetJobExecutionDetailsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def get_stage_execution_details(self,
-            request: Optional[Union[metrics.GetStageExecutionDetailsRequest, dict]] = None,
-            *,
-            retry: OptionalRetry = gapic_v1.method.DEFAULT,
-            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.GetStageExecutionDetailsAsyncPager:
-        r"""Request detailed information about the execution
-        status of a stage of the job.
-
-        EXPERIMENTAL. This API is subject to change or removal
-        without notice.
-
-        .. code-block:: python
-
-            # This snippet has been automatically generated and should be regarded as a
-            # code template only.
-            # It will require modifications to work:
-            # - It may require correct/in-range values for request initialization.
-            # - It may require specifying regional endpoints when creating the service
-            #   client as shown in:
-            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
-            from google.cloud import dataflow_v1beta3
-
-            async def sample_get_stage_execution_details():
-                # Create a client
-                client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
-
-                # Initialize request argument(s)
-                request = dataflow_v1beta3.GetStageExecutionDetailsRequest(
-                )
-
-                # Make the request
-                page_result = await client.get_stage_execution_details(request=request)
-
-                # Handle the response
-                async for response in page_result:
-                    print(response)
-
-        Args:
-            request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest, dict]]):
-                The request object. Request to get information about a
-                particular execution stage of a job. Currently only
-                tracked for Batch jobs.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager:
-                Information about the workers and
-                work items within a stage.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        request = metrics.GetStageExecutionDetailsRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = gapic_v1.method_async.wrap_method(
-            self._client._transport.get_stage_execution_details,
-            default_timeout=None,
-            client_info=DEFAULT_CLIENT_INFO,
-        )
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("project_id", request.project_id),
-                ("location", request.location),
-                ("job_id", request.job_id),
-                ("stage_id", request.stage_id),
-            )),
-        )
-
-        # Send the request.
-        response = await rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__aiter__` convenience method.
-        response = pagers.GetStageExecutionDetailsAsyncPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    async def __aenter__(self):
-        return self
-
-    async def __aexit__(self, exc_type, exc, tb):
-        await self.transport.close()
-
-DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)
-
-
-__all__ = (
-    "MetricsV1Beta3AsyncClient",
-)
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py
deleted file mode 100644
index e0e210d..0000000
--- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/client.py
+++ /dev/null
@@ -1,694 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
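The ``__aenter__``/``__aexit__`` pair defined on the async client above makes it usable as an async context manager, so the transport is closed deterministically. A hedged usage sketch (request fields left empty, as in the generated samples):

.. code-block:: python

    import asyncio

    from google.cloud import dataflow_v1beta3

    async def main():
        async with dataflow_v1beta3.MetricsV1Beta3AsyncClient() as client:
            request = dataflow_v1beta3.GetJobMetricsRequest()
            response = await client.get_job_metrics(request=request)
            print(response)

    asyncio.run(main())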
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.types import metrics -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import MetricsV1Beta3GrpcTransport -from .transports.grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport -from .transports.rest import MetricsV1Beta3RestTransport - - -class MetricsV1Beta3ClientMeta(type): - """Metaclass for the MetricsV1Beta3 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] - _transport_registry["grpc"] = MetricsV1Beta3GrpcTransport - _transport_registry["grpc_asyncio"] = MetricsV1Beta3GrpcAsyncIOTransport - _transport_registry["rest"] = MetricsV1Beta3RestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[MetricsV1Beta3Transport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class MetricsV1Beta3Client(metaclass=MetricsV1Beta3ClientMeta): - """The Dataflow Metrics API lets you monitor the progress of - Dataflow jobs. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. 
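The metaclass registry above resolves transports by label, defaulting to the first registered entry (gRPC) when no label is given. For illustration:

.. code-block:: python

    from google.cloud import dataflow_v1beta3

    rest_cls = dataflow_v1beta3.MetricsV1Beta3Client.get_transport_class("rest")
    default_cls = dataflow_v1beta3.MetricsV1Beta3Client.get_transport_class()
    print(rest_cls.__name__)     # MetricsV1Beta3RestTransport
    print(default_cls.__name__)  # MetricsV1Beta3GrpcTransport (first registered)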
- """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataflow.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsV1Beta3Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - MetricsV1Beta3Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> MetricsV1Beta3Transport: - """Returns the transport used by the client instance. - - Returns: - MetricsV1Beta3Transport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsV1Beta3Transport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the metrics v1 beta3 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, MetricsV1Beta3Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
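A hedged sketch of the selection rules implemented in ``get_mtls_endpoint_and_cert_source`` above, using the class defaults:

.. code-block:: python

    import os

    from google.cloud import dataflow_v1beta3

    Client = dataflow_v1beta3.MetricsV1Beta3Client
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, cert_source = Client.get_mtls_endpoint_and_cert_source()
    assert endpoint == Client.DEFAULT_MTLS_ENDPOINT
    # Without GOOGLE_API_USE_CLIENT_CERTIFICATE="true", no cert source is used.
    assert cert_source is None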
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, MetricsV1Beta3Transport): - # transport is a MetricsV1Beta3Transport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def get_job_metrics(self, - request: Optional[Union[metrics.GetJobMetricsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> metrics.JobMetrics: - r"""Request the job status. - - To request the status of a job, we recommend using - ``projects.locations.jobs.getMetrics`` with a [regional - endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.getMetrics`` is not recommended, as you - can only request the status of jobs that are running in - ``us-central1``. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_get_job_metrics(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobMetricsRequest( - ) - - # Make the request - response = client.get_job_metrics(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest, dict]): - The request object. Request to get job metrics. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.JobMetrics: - JobMetrics contains a collection of - metrics describing the detailed progress - of a Dataflow job. Metrics correspond to - user-defined and system-defined metrics - in the job. - - This resource captures only the most - recent values of each metric; - time-series data can be queried for them - (under the same metric names) from Cloud - Monitoring. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a metrics.GetJobMetricsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metrics.GetJobMetricsRequest): - request = metrics.GetJobMetricsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_metrics] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job_execution_details(self, - request: Optional[Union[metrics.GetJobExecutionDetailsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.GetJobExecutionDetailsPager: - r"""Request detailed information about the execution - status of the job. - EXPERIMENTAL. This API is subject to change or removal - without notice. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_get_job_execution_details(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobExecutionDetailsRequest( - ) - - # Make the request - page_result = client.get_job_execution_details(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest, dict]): - The request object. Request to get job execution - details. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager: - Information about the execution of a - job. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a metrics.GetJobExecutionDetailsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metrics.GetJobExecutionDetailsRequest): - request = metrics.GetJobExecutionDetailsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_execution_details] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.GetJobExecutionDetailsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_stage_execution_details(self, - request: Optional[Union[metrics.GetStageExecutionDetailsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.GetStageExecutionDetailsPager: - r"""Request detailed information about the execution - status of a stage of the job. - - EXPERIMENTAL. This API is subject to change or removal - without notice. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_get_stage_execution_details(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetStageExecutionDetailsRequest( - ) - - # Make the request - page_result = client.get_stage_execution_details(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest, dict]): - The request object. Request to get information about a - particular execution stage of a job. Currently only - tracked for Batch jobs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager: - Information about the workers and - work items within a stage. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a metrics.GetStageExecutionDetailsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, metrics.GetStageExecutionDetailsRequest): - request = metrics.GetStageExecutionDetailsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_stage_execution_details] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - ("stage_id", request.stage_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.GetStageExecutionDetailsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
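Mirroring the async client, the ``__enter__``/``__exit__`` pair here supports deterministic cleanup; per the warning above, this is only safe when the transport is not shared. A hedged usage sketch:

.. code-block:: python

    from google.cloud import dataflow_v1beta3

    with dataflow_v1beta3.MetricsV1Beta3Client() as client:
        request = dataflow_v1beta3.GetJobMetricsRequest()
        response = client.get_job_metrics(request=request)
        print(response)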
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "MetricsV1Beta3Client", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py deleted file mode 100644 index b12c560..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/pagers.py +++ /dev/null @@ -1,260 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.dataflow_v1beta3.types import metrics - - -class GetJobExecutionDetailsPager: - """A pager for iterating through ``get_job_execution_details`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` object, and - provides an ``__iter__`` method to iterate through its - ``stages`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``GetJobExecutionDetails`` requests and continue to iterate - through the ``stages`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metrics.JobExecutionDetails], - request: metrics.GetJobExecutionDetailsRequest, - response: metrics.JobExecutionDetails, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.JobExecutionDetails): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = metrics.GetJobExecutionDetailsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metrics.JobExecutionDetails]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metrics.StageSummary]: - for page in self.pages: - yield from page.stages - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class GetJobExecutionDetailsAsyncPager: - """A pager for iterating through ``get_job_execution_details`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` object, and - provides an ``__aiter__`` method to iterate through its - ``stages`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``GetJobExecutionDetails`` requests and continue to iterate - through the ``stages`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.JobExecutionDetails` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metrics.JobExecutionDetails]], - request: metrics.GetJobExecutionDetailsRequest, - response: metrics.JobExecutionDetails, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.JobExecutionDetails): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metrics.GetJobExecutionDetailsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metrics.JobExecutionDetails]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metrics.StageSummary]: - async def async_generator(): - async for page in self.pages: - for response in page.stages: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class GetStageExecutionDetailsPager: - """A pager for iterating through ``get_stage_execution_details`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` object, and - provides an ``__iter__`` method to iterate through its - ``workers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``GetStageExecutionDetails`` requests and continue to iterate - through the ``workers`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., metrics.StageExecutionDetails], - request: metrics.GetStageExecutionDetailsRequest, - response: metrics.StageExecutionDetails, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.StageExecutionDetails): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = metrics.GetStageExecutionDetailsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[metrics.StageExecutionDetails]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[metrics.WorkerDetails]: - for page in self.pages: - yield from page.workers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class GetStageExecutionDetailsAsyncPager: - """A pager for iterating through ``get_stage_execution_details`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` object, and - provides an ``__aiter__`` method to iterate through its - ``workers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``GetStageExecutionDetails`` requests and continue to iterate - through the ``workers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dataflow_v1beta3.types.StageExecutionDetails` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[metrics.StageExecutionDetails]], - request: metrics.GetStageExecutionDetailsRequest, - response: metrics.StageExecutionDetails, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest): - The initial request object. - response (google.cloud.dataflow_v1beta3.types.StageExecutionDetails): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = metrics.GetStageExecutionDetailsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[metrics.StageExecutionDetails]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[metrics.WorkerDetails]: - async def async_generator(): - async for page in self.pages: - for response in page.workers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py deleted file mode 100644 index 9e9fd57..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import MetricsV1Beta3Transport -from .grpc import MetricsV1Beta3GrpcTransport -from .grpc_asyncio import MetricsV1Beta3GrpcAsyncIOTransport -from .rest import MetricsV1Beta3RestTransport -from .rest import MetricsV1Beta3RestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsV1Beta3Transport]] -_transport_registry['grpc'] = MetricsV1Beta3GrpcTransport -_transport_registry['grpc_asyncio'] = MetricsV1Beta3GrpcAsyncIOTransport -_transport_registry['rest'] = MetricsV1Beta3RestTransport - -__all__ = ( - 'MetricsV1Beta3Transport', - 'MetricsV1Beta3GrpcTransport', - 'MetricsV1Beta3GrpcAsyncIOTransport', - 'MetricsV1Beta3RestTransport', - 'MetricsV1Beta3RestInterceptor', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py deleted file mode 100644 index 54c9b1f..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/base.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataflow_v1beta3.types import metrics - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class MetricsV1Beta3Transport(abc.ABC): - """Abstract transport class for MetricsV1Beta3.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', - ) - - DEFAULT_HOST: str = 'dataflow.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_job_metrics: gapic_v1.method.wrap_method( - self.get_job_metrics, - default_timeout=None, - client_info=client_info, - ), - self.get_job_execution_details: gapic_v1.method.wrap_method( - self.get_job_execution_details, - default_timeout=None, - client_info=client_info, - ), - self.get_stage_execution_details: gapic_v1.method.wrap_method( - self.get_stage_execution_details, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_job_metrics(self) -> Callable[ - [metrics.GetJobMetricsRequest], - Union[ - metrics.JobMetrics, - Awaitable[metrics.JobMetrics] - ]]: - raise NotImplementedError() - - @property - def get_job_execution_details(self) -> Callable[ - [metrics.GetJobExecutionDetailsRequest], - Union[ - metrics.JobExecutionDetails, - Awaitable[metrics.JobExecutionDetails] - ]]: - raise NotImplementedError() - - @property - def get_stage_execution_details(self) -> Callable[ - [metrics.GetStageExecutionDetailsRequest], - Union[ - metrics.StageExecutionDetails, - Awaitable[metrics.StageExecutionDetails] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'MetricsV1Beta3Transport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py deleted file mode 100644 index 14561c8..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc.py +++ /dev/null @@ -1,332 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
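A hedged sketch of the credential fallback implemented in the base transport above: when neither ``credentials`` nor ``credentials_file`` is supplied, Application Default Credentials are resolved with the service's default scopes.

.. code-block:: python

    import google.auth

    # Roughly what the transport does internally (simplified sketch).
    credentials, project_id = google.auth.default(
        scopes=["https://www.googleapis.com/auth/cloud-platform"]
    )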
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import warnings
-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
-
-from google.api_core import grpc_helpers
-from google.api_core import gapic_v1
-import google.auth  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-
-import grpc  # type: ignore
-
-from google.cloud.dataflow_v1beta3.types import metrics
-from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO
-
-
-class MetricsV1Beta3GrpcTransport(MetricsV1Beta3Transport):
-    """gRPC backend transport for MetricsV1Beta3.
-
-    The Dataflow Metrics API lets you monitor the progress of
-    Dataflow jobs.
-
-    This class defines the same methods as the primary client, so the
-    primary client can load the underlying transport implementation
-    and call it.
-
-    It sends protocol buffers over the wire using gRPC (which is built on
-    top of HTTP/2); the ``grpcio`` package must be installed.
-    """
-    _stubs: Dict[str, Callable]
-
-    def __init__(self, *,
-            host: str = 'dataflow.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[grpc.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                 The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
-                ignored if ``channel`` is provided.
-            channel (Optional[grpc.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def get_job_metrics(self) -> Callable[ - [metrics.GetJobMetricsRequest], - metrics.JobMetrics]: - r"""Return a callable for the get job metrics method over gRPC. - - Request the job status. - - To request the status of a job, we recommend using - ``projects.locations.jobs.getMetrics`` with a [regional - endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.getMetrics`` is not recommended, as you - can only request the status of jobs that are running in - ``us-central1``. - - Returns: - Callable[[~.GetJobMetricsRequest], - ~.JobMetrics]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_metrics' not in self._stubs: - self._stubs['get_job_metrics'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobMetrics', - request_serializer=metrics.GetJobMetricsRequest.serialize, - response_deserializer=metrics.JobMetrics.deserialize, - ) - return self._stubs['get_job_metrics'] - - @property - def get_job_execution_details(self) -> Callable[ - [metrics.GetJobExecutionDetailsRequest], - metrics.JobExecutionDetails]: - r"""Return a callable for the get job execution details method over gRPC. - - Request detailed information about the execution - status of the job. - EXPERIMENTAL. This API is subject to change or removal - without notice. 
- - Returns: - Callable[[~.GetJobExecutionDetailsRequest], - ~.JobExecutionDetails]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_execution_details' not in self._stubs: - self._stubs['get_job_execution_details'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobExecutionDetails', - request_serializer=metrics.GetJobExecutionDetailsRequest.serialize, - response_deserializer=metrics.JobExecutionDetails.deserialize, - ) - return self._stubs['get_job_execution_details'] - - @property - def get_stage_execution_details(self) -> Callable[ - [metrics.GetStageExecutionDetailsRequest], - metrics.StageExecutionDetails]: - r"""Return a callable for the get stage execution details method over gRPC. - - Request detailed information about the execution - status of a stage of the job. - - EXPERIMENTAL. This API is subject to change or removal - without notice. - - Returns: - Callable[[~.GetStageExecutionDetailsRequest], - ~.StageExecutionDetails]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stage_execution_details' not in self._stubs: - self._stubs['get_stage_execution_details'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.MetricsV1Beta3/GetStageExecutionDetails', - request_serializer=metrics.GetStageExecutionDetailsRequest.serialize, - response_deserializer=metrics.StageExecutionDetails.deserialize, - ) - return self._stubs['get_stage_execution_details'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'MetricsV1Beta3GrpcTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py deleted file mode 100644 index 280f1de..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/grpc_asyncio.py +++ /dev/null @@ -1,331 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
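The synchronous gRPC transport above is normally constructed behind the scenes by the client, but it can also be instantiated directly when channel-level control is needed. A minimal sketch, assuming Application Default Credentials are available and the staged package layout in this diff is installed; the project, location, and job IDs are placeholders:

.. code-block:: python

    from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports import (
        MetricsV1Beta3GrpcTransport,
    )
    from google.cloud.dataflow_v1beta3.types import metrics

    # With no credentials argument, the base transport falls back to ADC.
    transport = MetricsV1Beta3GrpcTransport(host="dataflow.googleapis.com")

    request = metrics.GetJobMetricsRequest(
        project_id="my-project",   # placeholder
        location="us-central1",    # placeholder
        job_id="my-job-id",        # placeholder
    )

    # Each RPC is exposed as a property that returns the underlying callable.
    response = transport.get_job_metrics(request)
    print(response)

Most callers should prefer ``MetricsV1Beta3Client``, which wraps these callables with retry and timeout handling.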
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataflow_v1beta3.types import metrics -from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .grpc import MetricsV1Beta3GrpcTransport - - -class MetricsV1Beta3GrpcAsyncIOTransport(MetricsV1Beta3Transport): - """gRPC AsyncIO backend transport for MetricsV1Beta3. - - The Dataflow Metrics API lets you monitor the progress of - Dataflow jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def get_job_metrics(self) -> Callable[ - [metrics.GetJobMetricsRequest], - Awaitable[metrics.JobMetrics]]: - r"""Return a callable for the get job metrics method over gRPC. - - Request the job status. - - To request the status of a job, we recommend using - ``projects.locations.jobs.getMetrics`` with a [regional - endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). - Using ``projects.jobs.getMetrics`` is not recommended, as you - can only request the status of jobs that are running in - ``us-central1``. - - Returns: - Callable[[~.GetJobMetricsRequest], - Awaitable[~.JobMetrics]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_metrics' not in self._stubs: - self._stubs['get_job_metrics'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobMetrics', - request_serializer=metrics.GetJobMetricsRequest.serialize, - response_deserializer=metrics.JobMetrics.deserialize, - ) - return self._stubs['get_job_metrics'] - - @property - def get_job_execution_details(self) -> Callable[ - [metrics.GetJobExecutionDetailsRequest], - Awaitable[metrics.JobExecutionDetails]]: - r"""Return a callable for the get job execution details method over gRPC. - - Request detailed information about the execution - status of the job. - EXPERIMENTAL. This API is subject to change or removal - without notice. - - Returns: - Callable[[~.GetJobExecutionDetailsRequest], - Awaitable[~.JobExecutionDetails]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_execution_details' not in self._stubs: - self._stubs['get_job_execution_details'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.MetricsV1Beta3/GetJobExecutionDetails', - request_serializer=metrics.GetJobExecutionDetailsRequest.serialize, - response_deserializer=metrics.JobExecutionDetails.deserialize, - ) - return self._stubs['get_job_execution_details'] - - @property - def get_stage_execution_details(self) -> Callable[ - [metrics.GetStageExecutionDetailsRequest], - Awaitable[metrics.StageExecutionDetails]]: - r"""Return a callable for the get stage execution details method over gRPC. - - Request detailed information about the execution - status of a stage of the job. - - EXPERIMENTAL. This API is subject to change or removal - without notice. - - Returns: - Callable[[~.GetStageExecutionDetailsRequest], - Awaitable[~.StageExecutionDetails]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stage_execution_details' not in self._stubs: - self._stubs['get_stage_execution_details'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.MetricsV1Beta3/GetStageExecutionDetails', - request_serializer=metrics.GetStageExecutionDetailsRequest.serialize, - response_deserializer=metrics.StageExecutionDetails.deserialize, - ) - return self._stubs['get_stage_execution_details'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'MetricsV1Beta3GrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py deleted file mode 100644 index 70be7e4..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/metrics_v1_beta3/transports/rest.py +++ /dev/null @@ -1,508 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataflow_v1beta3.types import metrics - -from .base import MetricsV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class MetricsV1Beta3RestInterceptor: - """Interceptor for MetricsV1Beta3. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MetricsV1Beta3RestTransport. - - .. code-block:: python - class MyCustomMetricsV1Beta3Interceptor(MetricsV1Beta3RestInterceptor): - def pre_get_job_execution_details(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job_execution_details(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_job_metrics(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_job_metrics(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_stage_execution_details(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_stage_execution_details(self, response): - logging.log(f"Received response: {response}") - return response - - transport = MetricsV1Beta3RestTransport(interceptor=MyCustomMetricsV1Beta3Interceptor()) - client = MetricsV1Beta3Client(transport=transport) - - - """ - def pre_get_job_execution_details(self, request: metrics.GetJobExecutionDetailsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[metrics.GetJobExecutionDetailsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_job_execution_details - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsV1Beta3 server. - """ - return request, metadata - - def post_get_job_execution_details(self, response: metrics.JobExecutionDetails) -> metrics.JobExecutionDetails: - """Post-rpc interceptor for get_job_execution_details - - Override in a subclass to manipulate the response - after it is returned by the MetricsV1Beta3 server but before - it is returned to user code. 
- """ - return response - def pre_get_job_metrics(self, request: metrics.GetJobMetricsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[metrics.GetJobMetricsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_job_metrics - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsV1Beta3 server. - """ - return request, metadata - - def post_get_job_metrics(self, response: metrics.JobMetrics) -> metrics.JobMetrics: - """Post-rpc interceptor for get_job_metrics - - Override in a subclass to manipulate the response - after it is returned by the MetricsV1Beta3 server but before - it is returned to user code. - """ - return response - def pre_get_stage_execution_details(self, request: metrics.GetStageExecutionDetailsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[metrics.GetStageExecutionDetailsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_stage_execution_details - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsV1Beta3 server. - """ - return request, metadata - - def post_get_stage_execution_details(self, response: metrics.StageExecutionDetails) -> metrics.StageExecutionDetails: - """Post-rpc interceptor for get_stage_execution_details - - Override in a subclass to manipulate the response - after it is returned by the MetricsV1Beta3 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class MetricsV1Beta3RestStub: - _session: AuthorizedSession - _host: str - _interceptor: MetricsV1Beta3RestInterceptor - - -class MetricsV1Beta3RestTransport(MetricsV1Beta3Transport): - """REST backend transport for MetricsV1Beta3. - - The Dataflow Metrics API lets you monitor the progress of - Dataflow jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MetricsV1Beta3RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. 
- quota_project_id (Optional[str]): An optional project to use for billing
- and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you are developing
- your own client library.
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should
- be used for service account credentials.
- url_scheme: the protocol scheme for the API endpoint. Normally
- "https", but for testing or local servers,
- "http" can be specified.
- """
- # Run the base constructor
- # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
- # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
- # credentials object
- maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
- super().__init__(
- host=host,
- credentials=credentials,
- client_info=client_info,
- always_use_jwt_access=always_use_jwt_access,
- api_audience=api_audience
- )
- self._session = AuthorizedSession(
- self._credentials, default_host=self.DEFAULT_HOST)
- if client_cert_source_for_mtls:
- self._session.configure_mtls_channel(client_cert_source_for_mtls)
- self._interceptor = interceptor or MetricsV1Beta3RestInterceptor()
- self._prep_wrapped_messages(client_info)
-
- class _GetJobExecutionDetails(MetricsV1Beta3RestStub):
- def __hash__(self):
- return hash("GetJobExecutionDetails")
-
- def __call__(self,
- request: metrics.GetJobExecutionDetailsRequest, *,
- retry: OptionalRetry=gapic_v1.method.DEFAULT,
- timeout: Optional[float]=None,
- metadata: Sequence[Tuple[str, str]]=(),
- ) -> metrics.JobExecutionDetails:
- r"""Call the get job execution details method over HTTP.
-
- Args:
- request (~.metrics.GetJobExecutionDetailsRequest):
- The request object. Request to get job execution details.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- ~.metrics.JobExecutionDetails:
- Information about the execution of a
- job.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/executionDetails', - }, - ] - request, metadata = self._interceptor.pre_get_job_execution_details(request, metadata) - pb_request = metrics.GetJobExecutionDetailsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metrics.JobExecutionDetails() - pb_resp = metrics.JobExecutionDetails.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_job_execution_details(resp) - return resp - - class _GetJobMetrics(MetricsV1Beta3RestStub): - def __hash__(self): - return hash("GetJobMetrics") - - def __call__(self, - request: metrics.GetJobMetricsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> metrics.JobMetrics: - r"""Call the get job metrics method over HTTP. - - Args: - request (~.metrics.GetJobMetricsRequest): - The request object. Request to get job metrics. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.metrics.JobMetrics: - JobMetrics contains a collection of - metrics describing the detailed progress - of a Dataflow job. Metrics correspond to - user-defined and system-defined metrics - in the job. - - This resource captures only the most - recent values of each metric; - time-series data can be queried for them - (under the same metric names) from Cloud - Monitoring. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/metrics', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/jobs/{job_id}/metrics', - }, - ] - request, metadata = self._interceptor.pre_get_job_metrics(request, metadata) - pb_request = metrics.GetJobMetricsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metrics.JobMetrics() - pb_resp = metrics.JobMetrics.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_job_metrics(resp) - return resp - - class _GetStageExecutionDetails(MetricsV1Beta3RestStub): - def __hash__(self): - return hash("GetStageExecutionDetails") - - def __call__(self, - request: metrics.GetStageExecutionDetailsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> metrics.StageExecutionDetails: - r"""Call the get stage execution - details method over HTTP. - - Args: - request (~.metrics.GetStageExecutionDetailsRequest): - The request object. Request to get information about a - particular execution stage of a job. - Currently only tracked for Batch jobs. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.metrics.StageExecutionDetails: - Information about the workers and - work items within a stage. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/stages/{stage_id}/executionDetails', - }, - ] - request, metadata = self._interceptor.pre_get_stage_execution_details(request, metadata) - pb_request = metrics.GetStageExecutionDetailsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = metrics.StageExecutionDetails() - pb_resp = metrics.StageExecutionDetails.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_stage_execution_details(resp) - return resp - - @property - def get_job_execution_details(self) -> Callable[ - [metrics.GetJobExecutionDetailsRequest], - metrics.JobExecutionDetails]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJobExecutionDetails(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_job_metrics(self) -> Callable[ - [metrics.GetJobMetricsRequest], - metrics.JobMetrics]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetJobMetrics(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_stage_execution_details(self) -> Callable[ - [metrics.GetStageExecutionDetailsRequest], - metrics.StageExecutionDetails]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetStageExecutionDetails(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MetricsV1Beta3RestTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py deleted file mode 100644 index 85c4cd1..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import SnapshotsV1Beta3Client -from .async_client import SnapshotsV1Beta3AsyncClient - -__all__ = ( - 'SnapshotsV1Beta3Client', - 'SnapshotsV1Beta3AsyncClient', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py deleted file mode 100644 index 1c6acc5..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/async_client.py +++ /dev/null @@ -1,437 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.types import snapshots -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport -from .client import SnapshotsV1Beta3Client - - -class SnapshotsV1Beta3AsyncClient: - """Provides methods to manage snapshots of Google Cloud Dataflow - jobs. 
- """ - - _client: SnapshotsV1Beta3Client - - DEFAULT_ENDPOINT = SnapshotsV1Beta3Client.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = SnapshotsV1Beta3Client.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(SnapshotsV1Beta3Client.common_billing_account_path) - parse_common_billing_account_path = staticmethod(SnapshotsV1Beta3Client.parse_common_billing_account_path) - common_folder_path = staticmethod(SnapshotsV1Beta3Client.common_folder_path) - parse_common_folder_path = staticmethod(SnapshotsV1Beta3Client.parse_common_folder_path) - common_organization_path = staticmethod(SnapshotsV1Beta3Client.common_organization_path) - parse_common_organization_path = staticmethod(SnapshotsV1Beta3Client.parse_common_organization_path) - common_project_path = staticmethod(SnapshotsV1Beta3Client.common_project_path) - parse_common_project_path = staticmethod(SnapshotsV1Beta3Client.parse_common_project_path) - common_location_path = staticmethod(SnapshotsV1Beta3Client.common_location_path) - parse_common_location_path = staticmethod(SnapshotsV1Beta3Client.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SnapshotsV1Beta3AsyncClient: The constructed client. - """ - return SnapshotsV1Beta3Client.from_service_account_info.__func__(SnapshotsV1Beta3AsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SnapshotsV1Beta3AsyncClient: The constructed client. - """ - return SnapshotsV1Beta3Client.from_service_account_file.__func__(SnapshotsV1Beta3AsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return SnapshotsV1Beta3Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> SnapshotsV1Beta3Transport: - """Returns the transport used by the client instance. - - Returns: - SnapshotsV1Beta3Transport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(SnapshotsV1Beta3Client).get_transport_class, type(SnapshotsV1Beta3Client)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SnapshotsV1Beta3Transport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the snapshots v1 beta3 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.SnapshotsV1Beta3Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SnapshotsV1Beta3Client( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def get_snapshot(self, - request: Optional[Union[snapshots.GetSnapshotRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.Snapshot: - r"""Gets information about a snapshot. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_get_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetSnapshotRequest( - ) - - # Make the request - response = await client.get_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetSnapshotRequest, dict]]): - The request object. Request to get information about a - snapshot - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Snapshot: - Represents a snapshot of a job. - """ - # Create or coerce a protobuf request object. - request = snapshots.GetSnapshotRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_snapshot, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("snapshot_id", request.snapshot_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_snapshot(self, - request: Optional[Union[snapshots.DeleteSnapshotRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.DeleteSnapshotResponse: - r"""Deletes a snapshot. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_delete_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.DeleteSnapshotRequest( - ) - - # Make the request - response = await client.delete_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest, dict]]): - The request object. Request to delete a snapshot. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse: - Response from deleting a snapshot. 
- """ - # Create or coerce a protobuf request object. - request = snapshots.DeleteSnapshotRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_snapshot, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("snapshot_id", request.snapshot_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_snapshots(self, - request: Optional[Union[snapshots.ListSnapshotsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.ListSnapshotsResponse: - r"""Lists snapshots. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_list_snapshots(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListSnapshotsRequest( - ) - - # Make the request - response = await client.list_snapshots(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest, dict]]): - The request object. Request to list snapshots. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse: - List of snapshots. - """ - # Create or coerce a protobuf request object. - request = snapshots.ListSnapshotsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_snapshots, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "SnapshotsV1Beta3AsyncClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py deleted file mode 100644 index 8e5f2e0..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/client.py +++ /dev/null @@ -1,635 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.types import snapshots -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .transports.grpc import SnapshotsV1Beta3GrpcTransport -from .transports.grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport -from .transports.rest import SnapshotsV1Beta3RestTransport - - -class SnapshotsV1Beta3ClientMeta(type): - """Metaclass for the SnapshotsV1Beta3 client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] - _transport_registry["grpc"] = SnapshotsV1Beta3GrpcTransport - _transport_registry["grpc_asyncio"] = SnapshotsV1Beta3GrpcAsyncIOTransport - _transport_registry["rest"] = SnapshotsV1Beta3RestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[SnapshotsV1Beta3Transport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. 
- - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SnapshotsV1Beta3Client(metaclass=SnapshotsV1Beta3ClientMeta): - """Provides methods to manage snapshots of Google Cloud Dataflow - jobs. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dataflow.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SnapshotsV1Beta3Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SnapshotsV1Beta3Client: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SnapshotsV1Beta3Transport: - """Returns the transport used by the client instance. - - Returns: - SnapshotsV1Beta3Transport: The transport used by the client - instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SnapshotsV1Beta3Transport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the snapshots v1 beta3 client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, SnapshotsV1Beta3Transport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, SnapshotsV1Beta3Transport): - # transport is a SnapshotsV1Beta3Transport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def get_snapshot(self, - request: Optional[Union[snapshots.GetSnapshotRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.Snapshot: - r"""Gets information about a snapshot. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_get_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetSnapshotRequest( - ) - - # Make the request - response = client.get_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.GetSnapshotRequest, dict]): - The request object. Request to get information about a - snapshot - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataflow_v1beta3.types.Snapshot: - Represents a snapshot of a job. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a snapshots.GetSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, snapshots.GetSnapshotRequest): - request = snapshots.GetSnapshotRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("snapshot_id", request.snapshot_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_snapshot(self, - request: Optional[Union[snapshots.DeleteSnapshotRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.DeleteSnapshotResponse: - r"""Deletes a snapshot. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_delete_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.DeleteSnapshotRequest( - ) - - # Make the request - response = client.delete_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest, dict]): - The request object. Request to delete a snapshot. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse: - Response from deleting a snapshot. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a snapshots.DeleteSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, snapshots.DeleteSnapshotRequest): - request = snapshots.DeleteSnapshotRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("snapshot_id", request.snapshot_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_snapshots(self, - request: Optional[Union[snapshots.ListSnapshotsRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> snapshots.ListSnapshotsResponse: - r"""Lists snapshots. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_list_snapshots(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListSnapshotsRequest( - ) - - # Make the request - response = client.list_snapshots(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest, dict]): - The request object. Request to list snapshots. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse: - List of snapshots. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a snapshots.ListSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, snapshots.ListSnapshotsRequest): - request = snapshots.ListSnapshotsRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_snapshots] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - ("job_id", request.job_id), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "SnapshotsV1Beta3Client", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py deleted file mode 100644 index 87e928c..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SnapshotsV1Beta3Transport -from .grpc import SnapshotsV1Beta3GrpcTransport -from .grpc_asyncio import SnapshotsV1Beta3GrpcAsyncIOTransport -from .rest import SnapshotsV1Beta3RestTransport -from .rest import SnapshotsV1Beta3RestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SnapshotsV1Beta3Transport]] -_transport_registry['grpc'] = SnapshotsV1Beta3GrpcTransport -_transport_registry['grpc_asyncio'] = SnapshotsV1Beta3GrpcAsyncIOTransport -_transport_registry['rest'] = SnapshotsV1Beta3RestTransport - -__all__ = ( - 'SnapshotsV1Beta3Transport', - 'SnapshotsV1Beta3GrpcTransport', - 'SnapshotsV1Beta3GrpcAsyncIOTransport', - 'SnapshotsV1Beta3RestTransport', - 'SnapshotsV1Beta3RestInterceptor', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py deleted file mode 100644 index c8ebbbc..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/base.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataflow_v1beta3.types import snapshots - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class SnapshotsV1Beta3Transport(abc.ABC): - """Abstract transport class for SnapshotsV1Beta3.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', - ) - - DEFAULT_HOST: str = 'dataflow.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.get_snapshot: gapic_v1.method.wrap_method( - self.get_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.delete_snapshot: gapic_v1.method.wrap_method( - self.delete_snapshot, - default_timeout=None, - client_info=client_info, - ), - self.list_snapshots: gapic_v1.method.wrap_method( - self.list_snapshots, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_snapshot(self) -> Callable[ - [snapshots.GetSnapshotRequest], - Union[ - snapshots.Snapshot, - Awaitable[snapshots.Snapshot] - ]]: - raise NotImplementedError() - - @property - def delete_snapshot(self) -> Callable[ - [snapshots.DeleteSnapshotRequest], - Union[ - snapshots.DeleteSnapshotResponse, - Awaitable[snapshots.DeleteSnapshotResponse] - ]]: - raise NotImplementedError() - - @property - def list_snapshots(self) -> Callable[ - [snapshots.ListSnapshotsRequest], - Union[ - snapshots.ListSnapshotsResponse, - Awaitable[snapshots.ListSnapshotsResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'SnapshotsV1Beta3Transport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py deleted file mode 100644 index 8499964..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc.py +++ /dev/null @@ -1,317 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataflow_v1beta3.types import snapshots -from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO - - -class SnapshotsV1Beta3GrpcTransport(SnapshotsV1Beta3Transport): - """gRPC backend transport for SnapshotsV1Beta3. 
- - Provides methods to manage snapshots of Google Cloud Dataflow - jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. 
- - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def get_snapshot(self) -> Callable[ - [snapshots.GetSnapshotRequest], - snapshots.Snapshot]: - r"""Return a callable for the get snapshot method over gRPC. - - Gets information about a snapshot. - - Returns: - Callable[[~.GetSnapshotRequest], - ~.Snapshot]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_snapshot' not in self._stubs: - self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.SnapshotsV1Beta3/GetSnapshot', - request_serializer=snapshots.GetSnapshotRequest.serialize, - response_deserializer=snapshots.Snapshot.deserialize, - ) - return self._stubs['get_snapshot'] - - @property - def delete_snapshot(self) -> Callable[ - [snapshots.DeleteSnapshotRequest], - snapshots.DeleteSnapshotResponse]: - r"""Return a callable for the delete snapshot method over gRPC. - - Deletes a snapshot. - - Returns: - Callable[[~.DeleteSnapshotRequest], - ~.DeleteSnapshotResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_snapshot' not in self._stubs: - self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.SnapshotsV1Beta3/DeleteSnapshot', - request_serializer=snapshots.DeleteSnapshotRequest.serialize, - response_deserializer=snapshots.DeleteSnapshotResponse.deserialize, - ) - return self._stubs['delete_snapshot'] - - @property - def list_snapshots(self) -> Callable[ - [snapshots.ListSnapshotsRequest], - snapshots.ListSnapshotsResponse]: - r"""Return a callable for the list snapshots method over gRPC. - - Lists snapshots. - - Returns: - Callable[[~.ListSnapshotsRequest], - ~.ListSnapshotsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
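Each stub property in this transport follows the same lazy-caching pattern: build the gRPC callable on first access, then serve it from ``self._stubs``. A generic sketch of that pattern (hypothetical class and method names, not the generated code):

.. code-block:: python

    from typing import Callable, Dict

    class _LazyStubs:
        """Caches unary-unary callables so serialization plumbing is built once."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        def unary_unary(self, name, path, serializer, deserializer):
            # Create the callable only on first use; reuse it afterwards.
            if name not in self._stubs:
                self._stubs[name] = self._channel.unary_unary(
                    path,
                    request_serializer=serializer,
                    response_deserializer=deserializer,
                )
            return self._stubs[name]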
- if 'list_snapshots' not in self._stubs: - self._stubs['list_snapshots'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots', - request_serializer=snapshots.ListSnapshotsRequest.serialize, - response_deserializer=snapshots.ListSnapshotsResponse.deserialize, - ) - return self._stubs['list_snapshots'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'SnapshotsV1Beta3GrpcTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py deleted file mode 100644 index ea290a2..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/grpc_asyncio.py +++ /dev/null @@ -1,316 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataflow_v1beta3.types import snapshots -from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO -from .grpc import SnapshotsV1Beta3GrpcTransport - - -class SnapshotsV1Beta3GrpcAsyncIOTransport(SnapshotsV1Beta3Transport): - """gRPC AsyncIO backend transport for SnapshotsV1Beta3. - - Provides methods to manage snapshots of Google Cloud Dataflow - jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
- scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
- Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def get_snapshot(self) -> Callable[ - [snapshots.GetSnapshotRequest], - Awaitable[snapshots.Snapshot]]: - r"""Return a callable for the get snapshot method over gRPC. - - Gets information about a snapshot. - - Returns: - Callable[[~.GetSnapshotRequest], - Awaitable[~.Snapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_snapshot' not in self._stubs: - self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.SnapshotsV1Beta3/GetSnapshot', - request_serializer=snapshots.GetSnapshotRequest.serialize, - response_deserializer=snapshots.Snapshot.deserialize, - ) - return self._stubs['get_snapshot'] - - @property - def delete_snapshot(self) -> Callable[ - [snapshots.DeleteSnapshotRequest], - Awaitable[snapshots.DeleteSnapshotResponse]]: - r"""Return a callable for the delete snapshot method over gRPC. - - Deletes a snapshot. - - Returns: - Callable[[~.DeleteSnapshotRequest], - Awaitable[~.DeleteSnapshotResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_snapshot' not in self._stubs: - self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.SnapshotsV1Beta3/DeleteSnapshot', - request_serializer=snapshots.DeleteSnapshotRequest.serialize, - response_deserializer=snapshots.DeleteSnapshotResponse.deserialize, - ) - return self._stubs['delete_snapshot'] - - @property - def list_snapshots(self) -> Callable[ - [snapshots.ListSnapshotsRequest], - Awaitable[snapshots.ListSnapshotsResponse]]: - r"""Return a callable for the list snapshots method over gRPC. - - Lists snapshots. - - Returns: - Callable[[~.ListSnapshotsRequest], - Awaitable[~.ListSnapshotsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_snapshots' not in self._stubs: - self._stubs['list_snapshots'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.SnapshotsV1Beta3/ListSnapshots', - request_serializer=snapshots.ListSnapshotsRequest.serialize, - response_deserializer=snapshots.ListSnapshotsResponse.deserialize, - ) - return self._stubs['list_snapshots'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'SnapshotsV1Beta3GrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py deleted file mode 100644 index bb10151..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/snapshots_v1_beta3/transports/rest.py +++ /dev/null @@ -1,503 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
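With the AsyncIO transport above in place, a minimal usage sketch of driving it through the async client (assumes Application Default Credentials are available; the project and region values are hypothetical). The ``async with`` block relies on the ``__aexit__`` defined earlier, which closes the underlying channel:

.. code-block:: python

    import asyncio

    from google.cloud import dataflow_v1beta3

    async def main():
        # Exiting the block calls transport.close() on the gRPC AsyncIO channel.
        async with dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() as client:
            request = dataflow_v1beta3.ListSnapshotsRequest(
                project_id="my-project",   # hypothetical
                location="us-central1",    # hypothetical
            )
            response = await client.list_snapshots(request=request)
            print(response)

    asyncio.run(main())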
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataflow_v1beta3.types import snapshots - -from .base import SnapshotsV1Beta3Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class SnapshotsV1Beta3RestInterceptor: - """Interceptor for SnapshotsV1Beta3. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the SnapshotsV1Beta3RestTransport. - - .. code-block:: python - class MyCustomSnapshotsV1Beta3Interceptor(SnapshotsV1Beta3RestInterceptor): - def pre_delete_snapshot(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_delete_snapshot(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_snapshot(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_snapshot(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_snapshots(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_snapshots(self, response): - logging.log(f"Received response: {response}") - return response - - transport = SnapshotsV1Beta3RestTransport(interceptor=MyCustomSnapshotsV1Beta3Interceptor()) - client = SnapshotsV1Beta3Client(transport=transport) - - - """ - def pre_delete_snapshot(self, request: snapshots.DeleteSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[snapshots.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_snapshot - - Override in a subclass to manipulate the request or metadata - before they are sent to the SnapshotsV1Beta3 server. - """ - return request, metadata - - def post_delete_snapshot(self, response: snapshots.DeleteSnapshotResponse) -> snapshots.DeleteSnapshotResponse: - """Post-rpc interceptor for delete_snapshot - - Override in a subclass to manipulate the response - after it is returned by the SnapshotsV1Beta3 server but before - it is returned to user code. 
- """ - return response - def pre_get_snapshot(self, request: snapshots.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[snapshots.GetSnapshotRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_snapshot - - Override in a subclass to manipulate the request or metadata - before they are sent to the SnapshotsV1Beta3 server. - """ - return request, metadata - - def post_get_snapshot(self, response: snapshots.Snapshot) -> snapshots.Snapshot: - """Post-rpc interceptor for get_snapshot - - Override in a subclass to manipulate the response - after it is returned by the SnapshotsV1Beta3 server but before - it is returned to user code. - """ - return response - def pre_list_snapshots(self, request: snapshots.ListSnapshotsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[snapshots.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_snapshots - - Override in a subclass to manipulate the request or metadata - before they are sent to the SnapshotsV1Beta3 server. - """ - return request, metadata - - def post_list_snapshots(self, response: snapshots.ListSnapshotsResponse) -> snapshots.ListSnapshotsResponse: - """Post-rpc interceptor for list_snapshots - - Override in a subclass to manipulate the response - after it is returned by the SnapshotsV1Beta3 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class SnapshotsV1Beta3RestStub: - _session: AuthorizedSession - _host: str - _interceptor: SnapshotsV1Beta3RestInterceptor - - -class SnapshotsV1Beta3RestTransport(SnapshotsV1Beta3Transport): - """REST backend transport for SnapshotsV1Beta3. - - Provides methods to manage snapshots of Google Cloud Dataflow - jobs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[SnapshotsV1Beta3RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you are developing
-                your own client library.
-            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-                be used for service account credentials.
-            url_scheme: the protocol scheme for the API endpoint.  Normally
-                "https", but for testing or local servers,
-                "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or SnapshotsV1Beta3RestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _DeleteSnapshot(SnapshotsV1Beta3RestStub):
-        def __hash__(self):
-            return hash("DeleteSnapshot")
-
-        def __call__(self,
-                request: snapshots.DeleteSnapshotRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> snapshots.DeleteSnapshotResponse:
-            r"""Call the delete snapshot method over HTTP.
-
-            Args:
-                request (~.snapshots.DeleteSnapshotRequest):
-                    The request object. Request to delete a snapshot.
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.snapshots.DeleteSnapshotResponse:
-                    Response from deleting a snapshot.
-            """
-
-            http_options: List[Dict[str, str]] = [{
-                'method': 'delete',
-                'uri': '/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}',
-            },
-{
-                'method': 'delete',
-                'uri': '/v1b3/projects/{project_id}/snapshots',
-            },
-            ]
-            request, metadata = self._interceptor.pre_delete_snapshot(request, metadata)
-            pb_request = snapshots.DeleteSnapshotRequest.pb(request)
-            transcoded_request = path_template.transcode(http_options, pb_request)
-
-            uri = transcoded_request['uri']
-            method = transcoded_request['method']
-
-            # Jsonify the query params
-            query_params = json.loads(json_format.MessageToJson(
-                transcoded_request['query_params'],
-                including_default_value_fields=False,
-                use_integers_for_enums=True,
-            ))
-
-            query_params["$alt"] = "json;enum-encoding=int"
-
-            # Send the request
-            headers = dict(metadata)
-            headers['Content-Type'] = 'application/json'
-            response = getattr(self._session, method)(
-                "{host}{uri}".format(host=self._host, uri=uri),
-                timeout=timeout,
-                headers=headers,
-                params=rest_helpers.flatten_query_params(query_params, strict=True),
-            )
-
-            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
-            # subclass.
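[Editor's note] For illustration, the host normalization performed by the REST constructor above reduces to the following standalone sketch (not part of the generated file):

import re

def normalize_host(host: str, url_scheme: str = "https") -> str:
    # Prepend the scheme only when the caller did not supply one,
    # mirroring the constructor logic above.
    m = re.match(r"^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
    if m is None:
        raise ValueError(f"Unexpected hostname structure: {host}")
    return host if m.groupdict()["scheme"] else f"{url_scheme}://{host}"

assert normalize_host("dataflow.googleapis.com") == "https://dataflow.googleapis.com"
assert normalize_host("http://localhost:8080") == "http://localhost:8080"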
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = snapshots.DeleteSnapshotResponse() - pb_resp = snapshots.DeleteSnapshotResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_snapshot(resp) - return resp - - class _GetSnapshot(SnapshotsV1Beta3RestStub): - def __hash__(self): - return hash("GetSnapshot") - - def __call__(self, - request: snapshots.GetSnapshotRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> snapshots.Snapshot: - r"""Call the get snapshot method over HTTP. - - Args: - request (~.snapshots.GetSnapshotRequest): - The request object. Request to get information about a - snapshot - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.snapshots.Snapshot: - Represents a snapshot of a job. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/snapshots/{snapshot_id}', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/snapshots/{snapshot_id}', - }, - ] - request, metadata = self._interceptor.pre_get_snapshot(request, metadata) - pb_request = snapshots.GetSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = snapshots.Snapshot() - pb_resp = snapshots.Snapshot.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_snapshot(resp) - return resp - - class _ListSnapshots(SnapshotsV1Beta3RestStub): - def __hash__(self): - return hash("ListSnapshots") - - def __call__(self, - request: snapshots.ListSnapshotsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> snapshots.ListSnapshotsResponse: - r"""Call the list snapshots method over HTTP. - - Args: - request (~.snapshots.ListSnapshotsRequest): - The request object. Request to list snapshots. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.snapshots.ListSnapshotsResponse: - List of snapshots. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/jobs/{job_id}/snapshots', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/snapshots', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/snapshots', - }, - ] - request, metadata = self._interceptor.pre_list_snapshots(request, metadata) - pb_request = snapshots.ListSnapshotsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = snapshots.ListSnapshotsResponse() - pb_resp = snapshots.ListSnapshotsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_snapshots(resp) - return resp - - @property - def delete_snapshot(self) -> Callable[ - [snapshots.DeleteSnapshotRequest], - snapshots.DeleteSnapshotResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSnapshot(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_snapshot(self) -> Callable[ - [snapshots.GetSnapshotRequest], - snapshots.Snapshot]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSnapshot(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_snapshots(self) -> Callable[ - [snapshots.ListSnapshotsRequest], - snapshots.ListSnapshotsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSnapshots(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'SnapshotsV1Beta3RestTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py deleted file mode 100644 index 3458541..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import TemplatesServiceClient -from .async_client import TemplatesServiceAsyncClient - -__all__ = ( - 'TemplatesServiceClient', - 'TemplatesServiceAsyncClient', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py deleted file mode 100644 index 892fc1e..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/async_client.py +++ /dev/null @@ -1,446 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport -from .client import TemplatesServiceClient - - -class TemplatesServiceAsyncClient: - """Provides a method to create Cloud Dataflow jobs from - templates. 
- """ - - _client: TemplatesServiceClient - - DEFAULT_ENDPOINT = TemplatesServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = TemplatesServiceClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod(TemplatesServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(TemplatesServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(TemplatesServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(TemplatesServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(TemplatesServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(TemplatesServiceClient.parse_common_organization_path) - common_project_path = staticmethod(TemplatesServiceClient.common_project_path) - parse_common_project_path = staticmethod(TemplatesServiceClient.parse_common_project_path) - common_location_path = staticmethod(TemplatesServiceClient.common_location_path) - parse_common_location_path = staticmethod(TemplatesServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TemplatesServiceAsyncClient: The constructed client. - """ - return TemplatesServiceClient.from_service_account_info.__func__(TemplatesServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TemplatesServiceAsyncClient: The constructed client. - """ - return TemplatesServiceClient.from_service_account_file.__func__(TemplatesServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return TemplatesServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> TemplatesServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TemplatesServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(TemplatesServiceClient).get_transport_class, type(TemplatesServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, TemplatesServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the templates service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.TemplatesServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = TemplatesServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_job_from_template(self, - request: Optional[Union[templates.CreateJobFromTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Creates a Cloud Dataflow job from a template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_create_job_from_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobFromTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = await client.create_job_from_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest, dict]]): - The request object. A request to create a Cloud Dataflow - job from a template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - request = templates.CreateJobFromTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_from_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def launch_template(self, - request: Optional[Union[templates.LaunchTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> templates.LaunchTemplateResponse: - r"""Launch a template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_launch_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = await client.launch_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest, dict]]): - The request object. A request to launch a template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse: - Response to the request to launch a - template. - - """ - # Create or coerce a protobuf request object. - request = templates.LaunchTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.launch_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_template(self, - request: Optional[Union[templates.GetTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> templates.GetTemplateResponse: - r"""Get the template associated with a template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - async def sample_get_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = await client.get_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.dataflow_v1beta3.types.GetTemplateRequest, dict]]): - The request object. A request to retrieve a Cloud - Dataflow job template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.GetTemplateResponse: - The response to a GetTemplate - request. - - """ - # Create or coerce a protobuf request object. - request = templates.GetTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_template, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "TemplatesServiceAsyncClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py deleted file mode 100644 index 5254aca..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/client.py +++ /dev/null @@ -1,644 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import TemplatesServiceGrpcTransport -from .transports.grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport -from .transports.rest import TemplatesServiceRestTransport - - -class TemplatesServiceClientMeta(type): - """Metaclass for the TemplatesService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] - _transport_registry["grpc"] = TemplatesServiceGrpcTransport - _transport_registry["grpc_asyncio"] = TemplatesServiceGrpcAsyncIOTransport - _transport_registry["rest"] = TemplatesServiceRestTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[TemplatesServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class TemplatesServiceClient(metaclass=TemplatesServiceClientMeta):
-    """Provides a method to create Cloud Dataflow jobs from
-    templates.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "dataflow.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            TemplatesServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            TemplatesServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> TemplatesServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            TemplatesServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TemplatesServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the templates service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, TemplatesServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. 
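[Editor's note] As a quick illustration of the endpoint resolution order implemented above, assuming `GOOGLE_API_USE_CLIENT_CERTIFICATE` is unset so no client certificate is configured:

import os
from google.cloud import dataflow_v1beta3

# With no client certificate available, both "auto" and "never" resolve to
# the regular endpoint; "always" would force the mTLS endpoint instead.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
endpoint, cert_source = (
    dataflow_v1beta3.TemplatesServiceClient.get_mtls_endpoint_and_cert_source())
assert endpoint == "dataflow.googleapis.com"
assert cert_source is None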
- Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, TemplatesServiceTransport): - # transport is a TemplatesServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_job_from_template(self, - request: Optional[Union[templates.CreateJobFromTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> jobs.Job: - r"""Creates a Cloud Dataflow job from a template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_create_job_from_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobFromTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = client.create_job_from_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest, dict]): - The request object. A request to create a Cloud Dataflow - job from a template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.Job: - Defines a job to be run by the Cloud - Dataflow service. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a templates.CreateJobFromTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, templates.CreateJobFromTemplateRequest): - request = templates.CreateJobFromTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_from_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def launch_template(self, - request: Optional[Union[templates.LaunchTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> templates.LaunchTemplateResponse: - r"""Launch a template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_launch_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = client.launch_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest, dict]): - The request object. A request to launch a template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse: - Response to the request to launch a - template. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a templates.LaunchTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, templates.LaunchTemplateRequest): - request = templates.LaunchTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.launch_template] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_template(self, - request: Optional[Union[templates.GetTemplateRequest, dict]] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> templates.GetTemplateResponse: - r"""Get the template associated with a template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud import dataflow_v1beta3 - - def sample_get_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = client.get_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.dataflow_v1beta3.types.GetTemplateRequest, dict]): - The request object. A request to retrieve a Cloud - Dataflow job template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dataflow_v1beta3.types.GetTemplateResponse: - The response to a GetTemplate - request. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a templates.GetTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, templates.GetTemplateRequest): - request = templates.GetTemplateRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project_id", request.project_id), - ("location", request.location), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "TemplatesServiceClient", -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py deleted file mode 100644 index 7a56700..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import TemplatesServiceTransport -from .grpc import TemplatesServiceGrpcTransport -from .grpc_asyncio import TemplatesServiceGrpcAsyncIOTransport -from .rest import TemplatesServiceRestTransport -from .rest import TemplatesServiceRestInterceptor - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TemplatesServiceTransport]] -_transport_registry['grpc'] = TemplatesServiceGrpcTransport -_transport_registry['grpc_asyncio'] = TemplatesServiceGrpcAsyncIOTransport -_transport_registry['rest'] = TemplatesServiceRestTransport - -__all__ = ( - 'TemplatesServiceTransport', - 'TemplatesServiceGrpcTransport', - 'TemplatesServiceGrpcAsyncIOTransport', - 'TemplatesServiceRestTransport', - 'TemplatesServiceRestInterceptor', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py deleted file mode 100644 index 78fab81..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/base.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.dataflow_v1beta3 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class TemplatesServiceTransport(abc.ABC): - """Abstract transport class for TemplatesService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', - ) - - DEFAULT_HOST: str = 'dataflow.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. 
- if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_job_from_template: gapic_v1.method.wrap_method( - self.create_job_from_template, - default_timeout=None, - client_info=client_info, - ), - self.launch_template: gapic_v1.method.wrap_method( - self.launch_template, - default_timeout=None, - client_info=client_info, - ), - self.get_template: gapic_v1.method.wrap_method( - self.get_template, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_job_from_template(self) -> Callable[ - [templates.CreateJobFromTemplateRequest], - Union[ - jobs.Job, - Awaitable[jobs.Job] - ]]: - raise NotImplementedError() - - @property - def launch_template(self) -> Callable[ - [templates.LaunchTemplateRequest], - Union[ - templates.LaunchTemplateResponse, - Awaitable[templates.LaunchTemplateResponse] - ]]: - raise NotImplementedError() - - @property - def get_template(self) -> Callable[ - [templates.GetTemplateRequest], - Union[ - templates.GetTemplateResponse, - Awaitable[templates.GetTemplateResponse] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'TemplatesServiceTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py deleted file mode 100644 index 93faab5..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc.py +++ /dev/null @@ -1,318 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
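The `_prep_wrapped_messages` method above registers every RPC through `gapic_v1.method.wrap_method`, which layers default timeout/retry values and client-info metadata over the bare transport callable. Conceptually the wrapping is a closure over defaults; here is a rough pure-Python stand-in (an illustration only, not the `google-api-core` implementation):

.. code-block:: python

    import functools
    from typing import Any, Callable, Optional

    def wrap_method_sketch(func: Callable[..., Any],
                           default_timeout: Optional[float] = None) -> Callable[..., Any]:
        """Stand-in for gapic_v1.method.wrap_method: supply a default
        timeout whenever the caller does not pass one explicitly."""
        @functools.wraps(func)
        def wrapper(request, *, timeout: Optional[float] = None, **kwargs):
            if timeout is None:
                timeout = default_timeout
            return func(request, timeout=timeout, **kwargs)
        return wrapper

    # The transport stores {bound_method: wrapped_callable}, and the client
    # always dispatches through the wrapped entry.
    echo = wrap_method_sketch(lambda req, *, timeout=None: (req, timeout),
                              default_timeout=30.0)
    assert echo("req") == ("req", 30.0)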
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO - - -class TemplatesServiceGrpcTransport(TemplatesServiceTransport): - """gRPC backend transport for TemplatesService. - - Provides a method to create Cloud Dataflow jobs from - templates. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
-        credentials_file (Optional[str]): A file with credentials that can
-            be loaded with :func:`google.auth.load_credentials_from_file`.
-            This argument is mutually exclusive with credentials.
-        scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-            service. These are only used when credentials are not specified and
-            are passed to :func:`google.auth.default`.
-        quota_project_id (Optional[str]): An optional project to use for billing
-            and quota.
-        kwargs (Optional[dict]): Keyword arguments, which are passed to the
-            channel creation.
-        Returns:
-            grpc.Channel: A gRPC channel object.
-
-        Raises:
-            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
-                and ``credentials_file`` are passed.
-        """
-
-        return grpc_helpers.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    @property
-    def grpc_channel(self) -> grpc.Channel:
-        """Return the channel designed to connect to this service.
-        """
-        return self._grpc_channel
-
-    @property
-    def create_job_from_template(self) -> Callable[
-            [templates.CreateJobFromTemplateRequest],
-            jobs.Job]:
-        r"""Return a callable for the create job from template method over gRPC.
-
-        Creates a Cloud Dataflow job from a template.
-
-        Returns:
-            Callable[[~.CreateJobFromTemplateRequest],
-                    ~.Job]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_job_from_template' not in self._stubs:
-            self._stubs['create_job_from_template'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.TemplatesService/CreateJobFromTemplate',
-                request_serializer=templates.CreateJobFromTemplateRequest.serialize,
-                response_deserializer=jobs.Job.deserialize,
-            )
-        return self._stubs['create_job_from_template']
-
-    @property
-    def launch_template(self) -> Callable[
-            [templates.LaunchTemplateRequest],
-            templates.LaunchTemplateResponse]:
-        r"""Return a callable for the launch template method over gRPC.
-
-        Launch a template.
-
-        Returns:
-            Callable[[~.LaunchTemplateRequest],
-                    ~.LaunchTemplateResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'launch_template' not in self._stubs:
-            self._stubs['launch_template'] = self.grpc_channel.unary_unary(
-                '/google.dataflow.v1beta3.TemplatesService/LaunchTemplate',
-                request_serializer=templates.LaunchTemplateRequest.serialize,
-                response_deserializer=templates.LaunchTemplateResponse.deserialize,
-            )
-        return self._stubs['launch_template']
-
-    @property
-    def get_template(self) -> Callable[
-            [templates.GetTemplateRequest],
-            templates.GetTemplateResponse]:
-        r"""Return a callable for the get template method over gRPC.
-
-        Get the template associated with a template.
-
-        Returns:
-            Callable[[~.GetTemplateRequest],
-                    ~.GetTemplateResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_template' not in self._stubs: - self._stubs['get_template'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.TemplatesService/GetTemplate', - request_serializer=templates.GetTemplateRequest.serialize, - response_deserializer=templates.GetTemplateResponse.deserialize, - ) - return self._stubs['get_template'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'TemplatesServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py deleted file mode 100644 index d7b5758..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,317 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import TemplatesServiceGrpcTransport - - -class TemplatesServiceGrpcAsyncIOTransport(TemplatesServiceTransport): - """gRPC AsyncIO backend transport for TemplatesService. - - Provides a method to create Cloud Dataflow jobs from - templates. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
-        credentials_file (Optional[str]): A file with credentials that can
-            be loaded with :func:`google.auth.load_credentials_from_file`.
-            This argument is mutually exclusive with credentials.
-        scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-            service. These are only used when credentials are not specified and
-            are passed to :func:`google.auth.default`.
-        quota_project_id (Optional[str]): An optional project to use for billing
-            and quota.
-        kwargs (Optional[dict]): Keyword arguments, which are passed to the
-            channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'dataflow.googleapis.com',
-            credentials: Optional[ga_credentials.Credentials] = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: Optional[aio.Channel] = None,
-            api_mtls_endpoint: Optional[str] = None,
-            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
-            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
-            quota_project_id: Optional[str] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            api_audience: Optional[str] = None,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for the grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure a mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_job_from_template(self) -> Callable[ - [templates.CreateJobFromTemplateRequest], - Awaitable[jobs.Job]]: - r"""Return a callable for the create job from template method over gRPC. - - Creates a Cloud Dataflow job from a template. - - Returns: - Callable[[~.CreateJobFromTemplateRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_from_template' not in self._stubs: - self._stubs['create_job_from_template'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.TemplatesService/CreateJobFromTemplate', - request_serializer=templates.CreateJobFromTemplateRequest.serialize, - response_deserializer=jobs.Job.deserialize, - ) - return self._stubs['create_job_from_template'] - - @property - def launch_template(self) -> Callable[ - [templates.LaunchTemplateRequest], - Awaitable[templates.LaunchTemplateResponse]]: - r"""Return a callable for the launch template method over gRPC. - - Launch a template. - - Returns: - Callable[[~.LaunchTemplateRequest], - Awaitable[~.LaunchTemplateResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'launch_template' not in self._stubs: - self._stubs['launch_template'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.TemplatesService/LaunchTemplate', - request_serializer=templates.LaunchTemplateRequest.serialize, - response_deserializer=templates.LaunchTemplateResponse.deserialize, - ) - return self._stubs['launch_template'] - - @property - def get_template(self) -> Callable[ - [templates.GetTemplateRequest], - Awaitable[templates.GetTemplateResponse]]: - r"""Return a callable for the get template method over gRPC. - - Get the template associated with a template. - - Returns: - Callable[[~.GetTemplateRequest], - Awaitable[~.GetTemplateResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_template' not in self._stubs: - self._stubs['get_template'] = self.grpc_channel.unary_unary( - '/google.dataflow.v1beta3.TemplatesService/GetTemplate', - request_serializer=templates.GetTemplateRequest.serialize, - response_deserializer=templates.GetTemplateResponse.deserialize, - ) - return self._stubs['get_template'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'TemplatesServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py deleted file mode 100644 index c704eb5..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/services/templates_service/transports/rest.py +++ /dev/null @@ -1,528 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
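Both gRPC transports above build their stubs lazily: the first access to a property such as `get_template` creates the unary-unary callable on the channel and caches it in `self._stubs`, so later calls reuse the same stub. The same memoization pattern in isolation (the class name and the string-based stand-in for `grpc_channel.unary_unary` are illustrative):

.. code-block:: python

    from typing import Callable, Dict

    class LazyStubs:
        """Memoize expensive callables by name, mirroring the
        self._stubs caching in the generated transports."""

        def __init__(self) -> None:
            self._stubs: Dict[str, Callable[[str], str]] = {}

        def _build(self, name: str) -> Callable[[str], str]:
            # In the real transport this is grpc_channel.unary_unary(...)
            # with a request serializer and a response deserializer.
            return lambda request: f"{name}({request})"

        @property
        def get_template(self) -> Callable[[str], str]:
            if "get_template" not in self._stubs:
                self._stubs["get_template"] = self._build("get_template")
            return self._stubs["get_template"]

    stubs = LazyStubs()
    assert stubs.get_template is stubs.get_template  # built once, then cached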
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates - -from .base import TemplatesServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class TemplatesServiceRestInterceptor: - """Interceptor for TemplatesService. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the TemplatesServiceRestTransport. - - .. code-block:: python - class MyCustomTemplatesServiceInterceptor(TemplatesServiceRestInterceptor): - def pre_create_job_from_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_job_from_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_template(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_launch_template(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_launch_template(self, response): - logging.log(f"Received response: {response}") - return response - - transport = TemplatesServiceRestTransport(interceptor=MyCustomTemplatesServiceInterceptor()) - client = TemplatesServiceClient(transport=transport) - - - """ - def pre_create_job_from_template(self, request: templates.CreateJobFromTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.CreateJobFromTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_job_from_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the TemplatesService server. - """ - return request, metadata - - def post_create_job_from_template(self, response: jobs.Job) -> jobs.Job: - """Post-rpc interceptor for create_job_from_template - - Override in a subclass to manipulate the response - after it is returned by the TemplatesService server but before - it is returned to user code. 
- """ - return response - def pre_get_template(self, request: templates.GetTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.GetTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the TemplatesService server. - """ - return request, metadata - - def post_get_template(self, response: templates.GetTemplateResponse) -> templates.GetTemplateResponse: - """Post-rpc interceptor for get_template - - Override in a subclass to manipulate the response - after it is returned by the TemplatesService server but before - it is returned to user code. - """ - return response - def pre_launch_template(self, request: templates.LaunchTemplateRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[templates.LaunchTemplateRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for launch_template - - Override in a subclass to manipulate the request or metadata - before they are sent to the TemplatesService server. - """ - return request, metadata - - def post_launch_template(self, response: templates.LaunchTemplateResponse) -> templates.LaunchTemplateResponse: - """Post-rpc interceptor for launch_template - - Override in a subclass to manipulate the response - after it is returned by the TemplatesService server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class TemplatesServiceRestStub: - _session: AuthorizedSession - _host: str - _interceptor: TemplatesServiceRestInterceptor - - -class TemplatesServiceRestTransport(TemplatesServiceTransport): - """REST backend transport for TemplatesService. - - Provides a method to create Cloud Dataflow jobs from - templates. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__(self, *, - host: str = 'dataflow.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[TemplatesServiceRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
-        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-            The client info used to send a user-agent string along with
-            API requests. If ``None``, then default info will be used.
-            Generally, you only need to set this if you are developing
-            your own client library.
-        always_use_jwt_access (Optional[bool]): Whether self signed JWT should
-            be used for service account credentials.
-        url_scheme: the protocol scheme for the API endpoint. Normally
-            "https", but for testing or local servers,
-            "http" can be specified.
-        """
-        # Run the base constructor
-        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
-        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
-        # credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
-        if maybe_url_match is None:
-            raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER
-
-        url_match_items = maybe_url_match.groupdict()
-
-        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
-        super().__init__(
-            host=host,
-            credentials=credentials,
-            client_info=client_info,
-            always_use_jwt_access=always_use_jwt_access,
-            api_audience=api_audience
-        )
-        self._session = AuthorizedSession(
-            self._credentials, default_host=self.DEFAULT_HOST)
-        if client_cert_source_for_mtls:
-            self._session.configure_mtls_channel(client_cert_source_for_mtls)
-        self._interceptor = interceptor or TemplatesServiceRestInterceptor()
-        self._prep_wrapped_messages(client_info)
-
-    class _CreateJobFromTemplate(TemplatesServiceRestStub):
-        def __hash__(self):
-            return hash("CreateJobFromTemplate")
-
-        def __call__(self,
-                request: templates.CreateJobFromTemplateRequest, *,
-                retry: OptionalRetry=gapic_v1.method.DEFAULT,
-                timeout: Optional[float]=None,
-                metadata: Sequence[Tuple[str, str]]=(),
-                ) -> jobs.Job:
-            r"""Call the create job from template method over HTTP.
-
-            Args:
-                request (~.templates.CreateJobFromTemplateRequest):
-                    The request object. A request to create a Cloud Dataflow
-                    job from a template.
-
-                retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                    should be retried.
-                timeout (float): The timeout for this request.
-                metadata (Sequence[Tuple[str, str]]): Strings which should be
-                    sent along with the request as metadata.
-
-            Returns:
-                ~.jobs.Job:
-                    Defines a job to be run by the Cloud
-                    Dataflow service.
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/templates', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/templates', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_create_job_from_template(request, metadata) - pb_request = templates.CreateJobFromTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = jobs.Job() - pb_resp = jobs.Job.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_job_from_template(resp) - return resp - - class _GetTemplate(TemplatesServiceRestStub): - def __hash__(self): - return hash("GetTemplate") - - def __call__(self, - request: templates.GetTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> templates.GetTemplateResponse: - r"""Call the get template method over HTTP. - - Args: - request (~.templates.GetTemplateRequest): - The request object. A request to retrieve a Cloud - Dataflow job template. - - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.templates.GetTemplateResponse: - The response to a GetTemplate - request. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/templates:get', - }, -{ - 'method': 'get', - 'uri': '/v1b3/projects/{project_id}/templates:get', - }, - ] - request, metadata = self._interceptor.pre_get_template(request, metadata) - pb_request = templates.GetTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = templates.GetTemplateResponse() - pb_resp = templates.GetTemplateResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_template(resp) - return resp - - class _LaunchTemplate(TemplatesServiceRestStub): - def __hash__(self): - return hash("LaunchTemplate") - - def __call__(self, - request: templates.LaunchTemplateRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> templates.LaunchTemplateResponse: - r"""Call the launch template method over HTTP. - - Args: - request (~.templates.LaunchTemplateRequest): - The request object. A request to launch a template. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.templates.LaunchTemplateResponse: - Response to the request to launch a - template. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/locations/{location}/templates:launch', - 'body': 'launch_parameters', - }, -{ - 'method': 'post', - 'uri': '/v1b3/projects/{project_id}/templates:launch', - 'body': 'launch_parameters', - }, - ] - request, metadata = self._interceptor.pre_launch_template(request, metadata) - pb_request = templates.LaunchTemplateRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=True - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=True, - )) - - query_params["$alt"] = "json;enum-encoding=int" - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = templates.LaunchTemplateResponse() - pb_resp = templates.LaunchTemplateResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_launch_template(resp) - return resp - - @property - def create_job_from_template(self) -> Callable[ - [templates.CreateJobFromTemplateRequest], - jobs.Job]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateJobFromTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_template(self) -> Callable[ - [templates.GetTemplateRequest], - templates.GetTemplateResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def launch_template(self) -> Callable[ - [templates.LaunchTemplateRequest], - templates.LaunchTemplateResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._LaunchTemplate(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'TemplatesServiceRestTransport', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py deleted file mode 100644 index 938458a..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/__init__.py +++ /dev/null @@ -1,242 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .environment import ( - AutoscalingSettings, - DebugOptions, - Disk, - Environment, - Package, - SdkHarnessContainerImage, - TaskRunnerSettings, - WorkerPool, - WorkerSettings, - AutoscalingAlgorithm, - DefaultPackageSet, - FlexResourceSchedulingGoal, - JobType, - ShuffleMode, - TeardownPolicy, - WorkerIPAddressConfiguration, -) -from .jobs import ( - BigQueryIODetails, - BigTableIODetails, - CheckActiveJobsRequest, - CheckActiveJobsResponse, - CreateJobRequest, - DatastoreIODetails, - DisplayData, - ExecutionStageState, - ExecutionStageSummary, - FailedLocation, - FileIODetails, - GetJobRequest, - Job, - JobExecutionInfo, - JobExecutionStageInfo, - JobMetadata, - ListJobsRequest, - ListJobsResponse, - PipelineDescription, - PubSubIODetails, - SdkVersion, - SnapshotJobRequest, - SpannerIODetails, - Step, - TransformSummary, - UpdateJobRequest, - JobState, - JobView, - KindType, -) -from .messages import ( - AutoscalingEvent, - JobMessage, - ListJobMessagesRequest, - ListJobMessagesResponse, - StructuredMessage, - JobMessageImportance, -) -from .metrics import ( - GetJobExecutionDetailsRequest, - GetJobMetricsRequest, - GetStageExecutionDetailsRequest, - JobExecutionDetails, - JobMetrics, - MetricStructuredName, - MetricUpdate, - ProgressTimeseries, - StageExecutionDetails, - StageSummary, - WorkerDetails, - WorkItemDetails, - ExecutionState, -) -from .snapshots import ( - DeleteSnapshotRequest, - DeleteSnapshotResponse, - GetSnapshotRequest, - ListSnapshotsRequest, - ListSnapshotsResponse, - PubsubSnapshotMetadata, - Snapshot, - SnapshotState, -) -from .streaming import ( - ComputationTopology, - CustomSourceLocation, - DataDiskAssignment, - KeyRangeDataDiskAssignment, - KeyRangeLocation, - MountedDataDisk, - PubsubLocation, - StateFamilyConfig, - StreamingApplianceSnapshotConfig, - StreamingComputationRanges, - StreamingSideInputLocation, - StreamingStageLocation, - StreamLocation, - TopologyConfig, -) -from .templates import ( - ContainerSpec, - CreateJobFromTemplateRequest, - DynamicTemplateLaunchParams, - FlexTemplateRuntimeEnvironment, - GetTemplateRequest, - GetTemplateResponse, - InvalidTemplateParameters, - LaunchFlexTemplateParameter, - LaunchFlexTemplateRequest, - LaunchFlexTemplateResponse, - LaunchTemplateParameters, - LaunchTemplateRequest, - LaunchTemplateResponse, - ParameterMetadata, - RuntimeEnvironment, - RuntimeMetadata, - SDKInfo, - TemplateMetadata, - ParameterType, -) - -__all__ = ( - 'AutoscalingSettings', - 'DebugOptions', - 'Disk', - 'Environment', - 'Package', - 'SdkHarnessContainerImage', - 'TaskRunnerSettings', - 'WorkerPool', - 'WorkerSettings', - 'AutoscalingAlgorithm', - 'DefaultPackageSet', - 'FlexResourceSchedulingGoal', - 'JobType', - 'ShuffleMode', - 'TeardownPolicy', - 'WorkerIPAddressConfiguration', - 'BigQueryIODetails', - 'BigTableIODetails', - 'CheckActiveJobsRequest', - 'CheckActiveJobsResponse', - 'CreateJobRequest', - 'DatastoreIODetails', - 'DisplayData', - 'ExecutionStageState', - 'ExecutionStageSummary', - 'FailedLocation', - 'FileIODetails', - 'GetJobRequest', - 'Job', - 'JobExecutionInfo', - 'JobExecutionStageInfo', - 
'JobMetadata', - 'ListJobsRequest', - 'ListJobsResponse', - 'PipelineDescription', - 'PubSubIODetails', - 'SdkVersion', - 'SnapshotJobRequest', - 'SpannerIODetails', - 'Step', - 'TransformSummary', - 'UpdateJobRequest', - 'JobState', - 'JobView', - 'KindType', - 'AutoscalingEvent', - 'JobMessage', - 'ListJobMessagesRequest', - 'ListJobMessagesResponse', - 'StructuredMessage', - 'JobMessageImportance', - 'GetJobExecutionDetailsRequest', - 'GetJobMetricsRequest', - 'GetStageExecutionDetailsRequest', - 'JobExecutionDetails', - 'JobMetrics', - 'MetricStructuredName', - 'MetricUpdate', - 'ProgressTimeseries', - 'StageExecutionDetails', - 'StageSummary', - 'WorkerDetails', - 'WorkItemDetails', - 'ExecutionState', - 'DeleteSnapshotRequest', - 'DeleteSnapshotResponse', - 'GetSnapshotRequest', - 'ListSnapshotsRequest', - 'ListSnapshotsResponse', - 'PubsubSnapshotMetadata', - 'Snapshot', - 'SnapshotState', - 'ComputationTopology', - 'CustomSourceLocation', - 'DataDiskAssignment', - 'KeyRangeDataDiskAssignment', - 'KeyRangeLocation', - 'MountedDataDisk', - 'PubsubLocation', - 'StateFamilyConfig', - 'StreamingApplianceSnapshotConfig', - 'StreamingComputationRanges', - 'StreamingSideInputLocation', - 'StreamingStageLocation', - 'StreamLocation', - 'TopologyConfig', - 'ContainerSpec', - 'CreateJobFromTemplateRequest', - 'DynamicTemplateLaunchParams', - 'FlexTemplateRuntimeEnvironment', - 'GetTemplateRequest', - 'GetTemplateResponse', - 'InvalidTemplateParameters', - 'LaunchFlexTemplateParameter', - 'LaunchFlexTemplateRequest', - 'LaunchFlexTemplateResponse', - 'LaunchTemplateParameters', - 'LaunchTemplateRequest', - 'LaunchTemplateResponse', - 'ParameterMetadata', - 'RuntimeEnvironment', - 'RuntimeMetadata', - 'SDKInfo', - 'TemplateMetadata', - 'ParameterType', -) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py deleted file mode 100644 index 5a494d7..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/environment.py +++ /dev/null @@ -1,891 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
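Because `types/__init__.py` re-exports every message at the package level, callers construct plain proto-plus requests and hand them to a client, as the generated samples earlier in this diff do. A short sketch (the bucket, project, and region values are hypothetical, and running it requires application-default credentials):

.. code-block:: python

    from google.cloud import dataflow_v1beta3

    # Build a typed request; GetTemplateRequest carries the routing
    # fields (project_id, location) plus the template's GCS path.
    request = dataflow_v1beta3.GetTemplateRequest(
        project_id="my-project",                           # hypothetical
        location="us-central1",                            # hypothetical
        gcs_path="gs://my-bucket/templates/my-template",   # hypothetical
    )

    client = dataflow_v1beta3.TemplatesServiceClient()
    response = client.get_template(request=request)
    print(response)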
-#
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import any_pb2  # type: ignore
-from google.protobuf import struct_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.dataflow.v1beta3',
-    manifest={
-        'JobType',
-        'FlexResourceSchedulingGoal',
-        'TeardownPolicy',
-        'DefaultPackageSet',
-        'AutoscalingAlgorithm',
-        'WorkerIPAddressConfiguration',
-        'ShuffleMode',
-        'Environment',
-        'Package',
-        'Disk',
-        'WorkerSettings',
-        'TaskRunnerSettings',
-        'AutoscalingSettings',
-        'SdkHarnessContainerImage',
-        'WorkerPool',
-        'DebugOptions',
-    },
-)
-
-
-class JobType(proto.Enum):
-    r"""Specifies the processing model used by a
-    [google.dataflow.v1beta3.Job], which determines the way the Job is
-    managed by the Cloud Dataflow service (how workers are scheduled,
-    how inputs are sharded, etc.).
-    """
-    JOB_TYPE_UNKNOWN = 0
-    JOB_TYPE_BATCH = 1
-    JOB_TYPE_STREAMING = 2
-
-
-class FlexResourceSchedulingGoal(proto.Enum):
-    r"""Specifies the resource to optimize for in Flexible Resource
-    Scheduling.
-    """
-    FLEXRS_UNSPECIFIED = 0
-    FLEXRS_SPEED_OPTIMIZED = 1
-    FLEXRS_COST_OPTIMIZED = 2
-
-
-class TeardownPolicy(proto.Enum):
-    r"""Specifies what happens to a resource when a Cloud Dataflow
-    [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job] has
-    completed.
-    """
-    TEARDOWN_POLICY_UNKNOWN = 0
-    TEARDOWN_ALWAYS = 1
-    TEARDOWN_ON_SUCCESS = 2
-    TEARDOWN_NEVER = 3
-
-
-class DefaultPackageSet(proto.Enum):
-    r"""The default set of packages to be staged on a pool of
-    workers.
-    """
-    DEFAULT_PACKAGE_SET_UNKNOWN = 0
-    DEFAULT_PACKAGE_SET_NONE = 1
-    DEFAULT_PACKAGE_SET_JAVA = 2
-    DEFAULT_PACKAGE_SET_PYTHON = 3
-
-
-class AutoscalingAlgorithm(proto.Enum):
-    r"""Specifies the algorithm used to determine the number of
-    worker processes to run at any given point in time, based on the
-    amount of data left to process, the number of workers, and how
-    quickly existing workers are processing data.
-    """
-    AUTOSCALING_ALGORITHM_UNKNOWN = 0
-    AUTOSCALING_ALGORITHM_NONE = 1
-    AUTOSCALING_ALGORITHM_BASIC = 2
-
-
-class WorkerIPAddressConfiguration(proto.Enum):
-    r"""Specifies how IP addresses should be allocated to the worker
-    machines.
-    """
-    WORKER_IP_UNSPECIFIED = 0
-    WORKER_IP_PUBLIC = 1
-    WORKER_IP_PRIVATE = 2
-
-
-class ShuffleMode(proto.Enum):
-    r"""Specifies the shuffle mode used by a [google.dataflow.v1beta3.Job],
-    which determines how data is shuffled during processing.
-    More details in:
-    https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#dataflow-shuffle
-    """
-    SHUFFLE_MODE_UNSPECIFIED = 0
-    VM_BASED = 1
-    SERVICE_BASED = 2
-
-
-class Environment(proto.Message):
-    r"""Describes the environment in which a Dataflow Job runs.
-
-    Attributes:
-        temp_storage_prefix (str):
-            The prefix of the resources the system should use for
-            temporary storage. The system will append the suffix
-            "/temp-{JOBNAME}" to this resource prefix, where {JOBNAME} is
-            the value of the job_name field. The resulting bucket and
-            object prefix is used as the prefix of the resources used to
-            store temporary data needed during the job execution. NOTE:
-            This will override the value in taskrunner_settings. The
-            supported resource type is:
-
-            Google Cloud Storage:
-
-            storage.googleapis.com/{bucket}/{object}
-            bucket.storage.googleapis.com/{object}
-        cluster_manager_api_service (str):
-            The type of cluster manager API to use. If
-            unknown or unspecified, the service will attempt
-            to choose a reasonable default.
This should be - in the form of the API service name, e.g. - "compute.googleapis.com". - experiments (MutableSequence[str]): - The list of experiments to enable. This field should be used - for SDK related experiments and not for service related - experiments. The proper field for service related - experiments is service_options. - service_options (MutableSequence[str]): - The list of service options to enable. This - field should be used for service related - experiments only. These experiments, when - graduating to GA, should be replaced by - dedicated fields or become default (i.e. always - on). - service_kms_key_name (str): - If set, contains the Cloud KMS key identifier used to - encrypt data at rest, AKA a Customer Managed Encryption Key - (CMEK). - - Format: - projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY - worker_pools (MutableSequence[google.cloud.dataflow_v1beta3.types.WorkerPool]): - The worker pools. At least one "harness" - worker pool must be specified in order for the - job to have workers. - user_agent (google.protobuf.struct_pb2.Struct): - A description of the process that generated - the request. - version (google.protobuf.struct_pb2.Struct): - A structure describing which components and - their versions of the service are required in - order to run the job. - dataset (str): - The dataset for the current project where - various workflow related tables are stored. - - The supported resource type is: - - Google BigQuery: - bigquery.googleapis.com/{dataset} - sdk_pipeline_options (google.protobuf.struct_pb2.Struct): - The Cloud Dataflow SDK pipeline options - specified by the user. These options are passed - through the service and are used to recreate the - SDK pipeline options on the worker in a language - agnostic and platform independent way. - internal_experiments (google.protobuf.any_pb2.Any): - Experimental settings. - service_account_email (str): - Identity to run virtual machines as. Defaults - to the default account. - flex_resource_scheduling_goal (google.cloud.dataflow_v1beta3.types.FlexResourceSchedulingGoal): - Which Flexible Resource Scheduling mode to - run in. - worker_region (str): - The Compute Engine region - (https://cloud.google.com/compute/docs/regions-zones/regions-zones) - in which worker processing should occur, e.g. "us-west1". - Mutually exclusive with worker_zone. If neither - worker_region nor worker_zone is specified, default to the - control plane's region. - worker_zone (str): - The Compute Engine zone - (https://cloud.google.com/compute/docs/regions-zones/regions-zones) - in which worker processing should occur, e.g. "us-west1-a". - Mutually exclusive with worker_region. If neither - worker_region nor worker_zone is specified, a zone in the - control plane's region is chosen based on available - capacity. - shuffle_mode (google.cloud.dataflow_v1beta3.types.ShuffleMode): - Output only. The shuffle mode used for the - job. - debug_options (google.cloud.dataflow_v1beta3.types.DebugOptions): - Any debugging options to be supplied to the - job. 
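# NOTE (editorial example, not part of the patch): a sketch of populating the
# Environment documented above; all values are hypothetical. worker_region and
# worker_zone are mutually exclusive, so only one of them is set here.
from google.cloud.dataflow_v1beta3.types import Environment, FlexResourceSchedulingGoal

env = Environment(
    temp_storage_prefix="storage.googleapis.com/my-bucket/my-object",
    service_account_email="worker@my-project.iam.gserviceaccount.com",
    worker_region="us-west1",  # leave worker_zone unset
    flex_resource_scheduling_goal=FlexResourceSchedulingGoal.FLEXRS_COST_OPTIMIZED,
)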
- """ - - temp_storage_prefix: str = proto.Field( - proto.STRING, - number=1, - ) - cluster_manager_api_service: str = proto.Field( - proto.STRING, - number=2, - ) - experiments: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - service_options: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=16, - ) - service_kms_key_name: str = proto.Field( - proto.STRING, - number=12, - ) - worker_pools: MutableSequence['WorkerPool'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='WorkerPool', - ) - user_agent: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=5, - message=struct_pb2.Struct, - ) - version: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=6, - message=struct_pb2.Struct, - ) - dataset: str = proto.Field( - proto.STRING, - number=7, - ) - sdk_pipeline_options: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=8, - message=struct_pb2.Struct, - ) - internal_experiments: any_pb2.Any = proto.Field( - proto.MESSAGE, - number=9, - message=any_pb2.Any, - ) - service_account_email: str = proto.Field( - proto.STRING, - number=10, - ) - flex_resource_scheduling_goal: 'FlexResourceSchedulingGoal' = proto.Field( - proto.ENUM, - number=11, - enum='FlexResourceSchedulingGoal', - ) - worker_region: str = proto.Field( - proto.STRING, - number=13, - ) - worker_zone: str = proto.Field( - proto.STRING, - number=14, - ) - shuffle_mode: 'ShuffleMode' = proto.Field( - proto.ENUM, - number=15, - enum='ShuffleMode', - ) - debug_options: 'DebugOptions' = proto.Field( - proto.MESSAGE, - number=17, - message='DebugOptions', - ) - - -class Package(proto.Message): - r"""The packages that must be installed in order for a worker to - run the steps of the Cloud Dataflow job that will be assigned to - its worker pool. - - This is the mechanism by which the Cloud Dataflow SDK causes - code to be loaded onto the workers. For example, the Cloud - Dataflow Java SDK might use this to install jars containing the - user's code and all of the various dependencies (libraries, data - files, etc.) required in order for that code to run. - - Attributes: - name (str): - The name of the package. - location (str): - The resource to read the package from. The - supported resource type is: - Google Cloud Storage: - - storage.googleapis.com/{bucket} - bucket.storage.googleapis.com/ - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - location: str = proto.Field( - proto.STRING, - number=2, - ) - - -class Disk(proto.Message): - r"""Describes the data disk used by a workflow job. - - Attributes: - size_gb (int): - Size of disk in GB. If zero or unspecified, - the service will attempt to choose a reasonable - default. - disk_type (str): - Disk storage type, as defined by Google - Compute Engine. This must be a disk type - appropriate to the project and zone in which the - workers will run. If unknown or unspecified, - the service will attempt to choose a reasonable - default. - - For example, the standard persistent disk type - is a resource name typically ending in - "pd-standard". If SSD persistent disks are - available, the resource name typically ends with - "pd-ssd". The actual valid values are defined - the Google Compute Engine API, not by the Cloud - Dataflow API; consult the Google Compute Engine - documentation for more information about - determining the set of available disk types for - a particular project and zone. 
- Google Compute Engine Disk types are local to a - particular project in a particular zone, and so - the resource name will typically look something - like this: - - compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard - mount_point (str): - Directory in a VM where disk is mounted. - """ - - size_gb: int = proto.Field( - proto.INT32, - number=1, - ) - disk_type: str = proto.Field( - proto.STRING, - number=2, - ) - mount_point: str = proto.Field( - proto.STRING, - number=3, - ) - - -class WorkerSettings(proto.Message): - r"""Provides data to pass through to the worker harness. - - Attributes: - base_url (str): - The base URL for accessing Google Cloud APIs. - When workers access Google Cloud APIs, they - logically do so via relative URLs. If this - field is specified, it supplies the base URL to - use for resolving these relative URLs. The - normative algorithm used is defined by RFC 1808, - "Relative Uniform Resource Locators". - - If not specified, the default value is - "http://www.googleapis.com/". - reporting_enabled (bool): - Whether to send work progress updates to the - service. - service_path (str): - The Cloud Dataflow service path relative to - the root URL, for example, - "dataflow/v1b3/projects". - shuffle_service_path (str): - The Shuffle service path relative to the root - URL, for example, "shuffle/v1beta1". - worker_id (str): - The ID of the worker running this pipeline. - temp_storage_prefix (str): - The prefix of the resources the system should - use for temporary storage. - - The supported resource type is: - - Google Cloud Storage: - - storage.googleapis.com/{bucket}/{object} - bucket.storage.googleapis.com/{object} - """ - - base_url: str = proto.Field( - proto.STRING, - number=1, - ) - reporting_enabled: bool = proto.Field( - proto.BOOL, - number=2, - ) - service_path: str = proto.Field( - proto.STRING, - number=3, - ) - shuffle_service_path: str = proto.Field( - proto.STRING, - number=4, - ) - worker_id: str = proto.Field( - proto.STRING, - number=5, - ) - temp_storage_prefix: str = proto.Field( - proto.STRING, - number=6, - ) - - -class TaskRunnerSettings(proto.Message): - r"""Taskrunner configuration settings. - - Attributes: - task_user (str): - The UNIX user ID on the worker VM to use for - tasks launched by taskrunner; e.g. "root". - task_group (str): - The UNIX group ID on the worker VM to use for - tasks launched by taskrunner; e.g. "wheel". - oauth_scopes (MutableSequence[str]): - The OAuth2 scopes to be requested by the - taskrunner in order to access the Cloud Dataflow - API. - base_url (str): - The base URL for the taskrunner to use when - accessing Google Cloud APIs. - When workers access Google Cloud APIs, they - logically do so via relative URLs. If this - field is specified, it supplies the base URL to - use for resolving these relative URLs. The - normative algorithm used is defined by RFC 1808, - "Relative Uniform Resource Locators". - - If not specified, the default value is - "http://www.googleapis.com/". - dataflow_api_version (str): - The API version of endpoint, e.g. "v1b3". - parallel_worker_settings (google.cloud.dataflow_v1beta3.types.WorkerSettings): - The settings to pass to the parallel worker - harness. - base_task_dir (str): - The location on the worker for task-specific - subdirectories. - continue_on_exception (bool): - Whether to continue taskrunner if an - exception is hit. - log_to_serialconsole (bool): - Whether to send taskrunner log info to Google - Compute Engine VM serial console. 
-        alsologtostderr (bool):
-            Whether to also send taskrunner log info to
-            stderr.
-        log_upload_location (str):
-            Indicates where to put logs. If this is not
-            specified, the logs will not be uploaded.
-
-            The supported resource type is:
-
-            Google Cloud Storage:
-            storage.googleapis.com/{bucket}/{object}
-            bucket.storage.googleapis.com/{object}
-        log_dir (str):
-            The directory on the VM to store logs.
-        temp_storage_prefix (str):
-            The prefix of the resources the taskrunner
-            should use for temporary storage.
-
-            The supported resource type is:
-
-            Google Cloud Storage:
-            storage.googleapis.com/{bucket}/{object}
-            bucket.storage.googleapis.com/{object}
-        harness_command (str):
-            The command to launch the worker harness.
-        workflow_file_name (str):
-            The file to store the workflow in.
-        commandlines_file_name (str):
-            The file to store preprocessing commands in.
-        vm_id (str):
-            The ID string of the VM.
-        language_hint (str):
-            The suggested backend language.
-        streaming_worker_main_class (str):
-            The streaming worker main class name.
-    """
-
-    task_user: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    task_group: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    oauth_scopes: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=3,
-    )
-    base_url: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    dataflow_api_version: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    parallel_worker_settings: 'WorkerSettings' = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message='WorkerSettings',
-    )
-    base_task_dir: str = proto.Field(
-        proto.STRING,
-        number=7,
-    )
-    continue_on_exception: bool = proto.Field(
-        proto.BOOL,
-        number=8,
-    )
-    log_to_serialconsole: bool = proto.Field(
-        proto.BOOL,
-        number=9,
-    )
-    alsologtostderr: bool = proto.Field(
-        proto.BOOL,
-        number=10,
-    )
-    log_upload_location: str = proto.Field(
-        proto.STRING,
-        number=11,
-    )
-    log_dir: str = proto.Field(
-        proto.STRING,
-        number=12,
-    )
-    temp_storage_prefix: str = proto.Field(
-        proto.STRING,
-        number=13,
-    )
-    harness_command: str = proto.Field(
-        proto.STRING,
-        number=14,
-    )
-    workflow_file_name: str = proto.Field(
-        proto.STRING,
-        number=15,
-    )
-    commandlines_file_name: str = proto.Field(
-        proto.STRING,
-        number=16,
-    )
-    vm_id: str = proto.Field(
-        proto.STRING,
-        number=17,
-    )
-    language_hint: str = proto.Field(
-        proto.STRING,
-        number=18,
-    )
-    streaming_worker_main_class: str = proto.Field(
-        proto.STRING,
-        number=19,
-    )
-
-
-class AutoscalingSettings(proto.Message):
-    r"""Settings for WorkerPool autoscaling.
-
-    Attributes:
-        algorithm (google.cloud.dataflow_v1beta3.types.AutoscalingAlgorithm):
-            The algorithm to use for autoscaling.
-        max_num_workers (int):
-            The maximum number of workers to cap scaling
-            at.
-    """
-
-    algorithm: 'AutoscalingAlgorithm' = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum='AutoscalingAlgorithm',
-    )
-    max_num_workers: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-
-
-class SdkHarnessContainerImage(proto.Message):
-    r"""Defines an SDK harness container for executing Dataflow
-    pipelines.
-
-    Attributes:
-        container_image (str):
-            A docker container image that resides in
-            Google Container Registry.
-        use_single_core_per_container (bool):
-            If true, recommends that the Dataflow service
-            use only one core per SDK container instance
-            with this image. If false (or unset), recommends
-            using more than one core per SDK container
-            instance with this image for efficiency. Note
-            that the Dataflow service may choose to override
-            this property if needed.
-        environment_id (str):
-            Environment ID for the Beam runner API proto
-            Environment that corresponds to the current SDK
-            Harness.
-        capabilities (MutableSequence[str]):
-            The set of capabilities enumerated in the above Environment
-            proto. See also
-            https://github.com/apache/beam/blob/master/model/pipeline/src/main/proto/beam_runner_api.proto
-    """
-
-    container_image: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    use_single_core_per_container: bool = proto.Field(
-        proto.BOOL,
-        number=2,
-    )
-    environment_id: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    capabilities: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=4,
-    )
-
-
-class WorkerPool(proto.Message):
-    r"""Describes one particular pool of Cloud Dataflow workers to be
-    instantiated by the Cloud Dataflow service in order to perform
-    the computations required by a job. Note that a workflow job
-    may use multiple pools, in order to match the various
-    computational requirements of the various stages of the job.
-
-    Attributes:
-        kind (str):
-            The kind of the worker pool; currently only ``harness`` and
-            ``shuffle`` are supported.
-        num_workers (int):
-            Number of Google Compute Engine workers in
-            this pool needed to execute the job. If zero or
-            unspecified, the service will attempt to choose
-            a reasonable default.
-        packages (MutableSequence[google.cloud.dataflow_v1beta3.types.Package]):
-            Packages to be installed on workers.
-        default_package_set (google.cloud.dataflow_v1beta3.types.DefaultPackageSet):
-            The default package set to install. This
-            allows the service to select a default set of
-            packages which are useful to worker harnesses
-            written in a particular language.
-        machine_type (str):
-            Machine type (e.g. "n1-standard-1"). If
-            empty or unspecified, the service will attempt
-            to choose a reasonable default.
-        teardown_policy (google.cloud.dataflow_v1beta3.types.TeardownPolicy):
-            Sets the policy for determining when to turn down the
-            worker pool. Allowed values are: ``TEARDOWN_ALWAYS``,
-            ``TEARDOWN_ON_SUCCESS``, and ``TEARDOWN_NEVER``.
-            ``TEARDOWN_ALWAYS`` means workers are always torn down
-            regardless of whether the job succeeds.
-            ``TEARDOWN_ON_SUCCESS`` means workers are torn down if the
-            job succeeds. ``TEARDOWN_NEVER`` means the workers are never
-            torn down.
-
-            If the workers are not torn down by the service, they will
-            continue to run and use Google Compute Engine VM resources
-            in the user's project until they are explicitly terminated
-            by the user. Because of this, Google recommends using the
-            ``TEARDOWN_ALWAYS`` policy except for small, manually
-            supervised test jobs.
-
-            If unknown or unspecified, the service will attempt to
-            choose a reasonable default.
-        disk_size_gb (int):
-            Size of root disk for VMs, in GB. If zero or
-            unspecified, the service will attempt to choose
-            a reasonable default.
-        disk_type (str):
-            Type of root disk for VMs. If empty or
-            unspecified, the service will attempt to choose
-            a reasonable default.
-        disk_source_image (str):
-            Fully qualified source image for disks.
-        zone (str):
-            Zone to run the worker pools in. If empty or
-            unspecified, the service will attempt to choose
-            a reasonable default.
-        taskrunner_settings (google.cloud.dataflow_v1beta3.types.TaskRunnerSettings):
-            Settings passed through to Google Compute
-            Engine workers when using the standard Dataflow
-            task runner. Users should ignore this field.
-        on_host_maintenance (str):
-            The action to take on host maintenance, as
-            defined by the Google Compute Engine API.
- data_disks (MutableSequence[google.cloud.dataflow_v1beta3.types.Disk]): - Data disks that are used by a VM in this - workflow. - metadata (MutableMapping[str, str]): - Metadata to set on the Google Compute Engine - VMs. - autoscaling_settings (google.cloud.dataflow_v1beta3.types.AutoscalingSettings): - Settings for autoscaling of this WorkerPool. - pool_args (google.protobuf.any_pb2.Any): - Extra arguments for this worker pool. - network (str): - Network to which VMs will be assigned. If - empty or unspecified, the service will use the - network "default". - subnetwork (str): - Subnetwork to which VMs will be assigned, if - desired. Expected to be of the form - "regions/REGION/subnetworks/SUBNETWORK". - worker_harness_container_image (str): - Required. Docker container image that executes the Cloud - Dataflow worker harness, residing in Google Container - Registry. - - Deprecated for the Fn API path. Use - sdk_harness_container_images instead. - num_threads_per_worker (int): - The number of threads per worker harness. If - empty or unspecified, the service will choose a - number of threads (according to the number of - cores on the selected machine type for batch, or - 1 by convention for streaming). - ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): - Configuration for VM IPs. - sdk_harness_container_images (MutableSequence[google.cloud.dataflow_v1beta3.types.SdkHarnessContainerImage]): - Set of SDK harness containers needed to - execute this pipeline. This will only be set in - the Fn API path. For non-cross-language - pipelines this should have only one entry. - Cross-language pipelines will have two or more - entries. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - num_workers: int = proto.Field( - proto.INT32, - number=2, - ) - packages: MutableSequence['Package'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Package', - ) - default_package_set: 'DefaultPackageSet' = proto.Field( - proto.ENUM, - number=4, - enum='DefaultPackageSet', - ) - machine_type: str = proto.Field( - proto.STRING, - number=5, - ) - teardown_policy: 'TeardownPolicy' = proto.Field( - proto.ENUM, - number=6, - enum='TeardownPolicy', - ) - disk_size_gb: int = proto.Field( - proto.INT32, - number=7, - ) - disk_type: str = proto.Field( - proto.STRING, - number=16, - ) - disk_source_image: str = proto.Field( - proto.STRING, - number=8, - ) - zone: str = proto.Field( - proto.STRING, - number=9, - ) - taskrunner_settings: 'TaskRunnerSettings' = proto.Field( - proto.MESSAGE, - number=10, - message='TaskRunnerSettings', - ) - on_host_maintenance: str = proto.Field( - proto.STRING, - number=11, - ) - data_disks: MutableSequence['Disk'] = proto.RepeatedField( - proto.MESSAGE, - number=12, - message='Disk', - ) - metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=13, - ) - autoscaling_settings: 'AutoscalingSettings' = proto.Field( - proto.MESSAGE, - number=14, - message='AutoscalingSettings', - ) - pool_args: any_pb2.Any = proto.Field( - proto.MESSAGE, - number=15, - message=any_pb2.Any, - ) - network: str = proto.Field( - proto.STRING, - number=17, - ) - subnetwork: str = proto.Field( - proto.STRING, - number=19, - ) - worker_harness_container_image: str = proto.Field( - proto.STRING, - number=18, - ) - num_threads_per_worker: int = proto.Field( - proto.INT32, - number=20, - ) - ip_configuration: 'WorkerIPAddressConfiguration' = proto.Field( - proto.ENUM, - number=21, - enum='WorkerIPAddressConfiguration', - 
) - sdk_harness_container_images: MutableSequence['SdkHarnessContainerImage'] = proto.RepeatedField( - proto.MESSAGE, - number=22, - message='SdkHarnessContainerImage', - ) - - -class DebugOptions(proto.Message): - r"""Describes any options that have an effect on the debugging of - pipelines. - - Attributes: - enable_hot_key_logging (bool): - When true, enables the logging of the literal - hot key to the user's Cloud Logging. - """ - - enable_hot_key_logging: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py deleted file mode 100644 index 89be109..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/jobs.py +++ /dev/null @@ -1,1425 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataflow_v1beta3.types import environment as gd_environment -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.dataflow.v1beta3', - manifest={ - 'KindType', - 'JobState', - 'JobView', - 'Job', - 'DatastoreIODetails', - 'PubSubIODetails', - 'FileIODetails', - 'BigTableIODetails', - 'BigQueryIODetails', - 'SpannerIODetails', - 'SdkVersion', - 'JobMetadata', - 'ExecutionStageState', - 'PipelineDescription', - 'TransformSummary', - 'ExecutionStageSummary', - 'DisplayData', - 'Step', - 'JobExecutionInfo', - 'JobExecutionStageInfo', - 'CreateJobRequest', - 'GetJobRequest', - 'UpdateJobRequest', - 'ListJobsRequest', - 'FailedLocation', - 'ListJobsResponse', - 'SnapshotJobRequest', - 'CheckActiveJobsRequest', - 'CheckActiveJobsResponse', - }, -) - - -class KindType(proto.Enum): - r"""Type of transform or stage operation.""" - UNKNOWN_KIND = 0 - PAR_DO_KIND = 1 - GROUP_BY_KEY_KIND = 2 - FLATTEN_KIND = 3 - READ_KIND = 4 - WRITE_KIND = 5 - CONSTANT_KIND = 6 - SINGLETON_KIND = 7 - SHUFFLE_KIND = 8 - - -class JobState(proto.Enum): - r"""Describes the overall state of a - [google.dataflow.v1beta3.Job][google.dataflow.v1beta3.Job]. - """ - JOB_STATE_UNKNOWN = 0 - JOB_STATE_STOPPED = 1 - JOB_STATE_RUNNING = 2 - JOB_STATE_DONE = 3 - JOB_STATE_FAILED = 4 - JOB_STATE_CANCELLED = 5 - JOB_STATE_UPDATED = 6 - JOB_STATE_DRAINING = 7 - JOB_STATE_DRAINED = 8 - JOB_STATE_PENDING = 9 - JOB_STATE_CANCELLING = 10 - JOB_STATE_QUEUED = 11 - JOB_STATE_RESOURCE_CLEANING_UP = 12 - - -class JobView(proto.Enum): - r"""Selector for how much information is returned in Job - responses. - """ - JOB_VIEW_UNKNOWN = 0 - JOB_VIEW_SUMMARY = 1 - JOB_VIEW_ALL = 2 - JOB_VIEW_DESCRIPTION = 3 - - -class Job(proto.Message): - r"""Defines a job to be run by the Cloud Dataflow service. 
- - Attributes: - id (str): - The unique ID of this job. - This field is set by the Cloud Dataflow service - when the Job is created, and is immutable for - the life of the job. - project_id (str): - The ID of the Cloud Platform project that the - job belongs to. - name (str): - The user-specified Cloud Dataflow job name. - - Only one Job with a given name may exist in a project at any - given time. If a caller attempts to create a Job with the - same name as an already-existing Job, the attempt returns - the existing Job. - - The name must match the regular expression - ``[a-z]([-a-z0-9]{0,1022}[a-z0-9])?`` - type_ (google.cloud.dataflow_v1beta3.types.JobType): - The type of Cloud Dataflow job. - environment (google.cloud.dataflow_v1beta3.types.Environment): - The environment for the job. - steps (MutableSequence[google.cloud.dataflow_v1beta3.types.Step]): - Exactly one of step or steps_location should be specified. - - The top-level steps that constitute the entire job. Only - retrieved with JOB_VIEW_ALL. - steps_location (str): - The Cloud Storage location where the steps - are stored. - current_state (google.cloud.dataflow_v1beta3.types.JobState): - The current state of the job. - - Jobs are created in the ``JOB_STATE_STOPPED`` state unless - otherwise specified. - - A job in the ``JOB_STATE_RUNNING`` state may asynchronously - enter a terminal state. After a job has reached a terminal - state, no further state updates may be made. - - This field may be mutated by the Cloud Dataflow service; - callers cannot mutate it. - current_state_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp associated with the current - state. - requested_state (google.cloud.dataflow_v1beta3.types.JobState): - The job's requested state. - - ``UpdateJob`` may be used to switch between the - ``JOB_STATE_STOPPED`` and ``JOB_STATE_RUNNING`` states, by - setting requested_state. ``UpdateJob`` may also be used to - directly set a job's requested state to - ``JOB_STATE_CANCELLED`` or ``JOB_STATE_DONE``, irrevocably - terminating the job if it has not already reached a terminal - state. - execution_info (google.cloud.dataflow_v1beta3.types.JobExecutionInfo): - Deprecated. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp when the job was initially - created. Immutable and set by the Cloud Dataflow - service. - replace_job_id (str): - If this job is an update of an existing job, this field is - the job ID of the job it replaced. - - When sending a ``CreateJobRequest``, you can update a job by - specifying it here. The job named here is stopped, and its - intermediate state is transferred to this job. - transform_name_mapping (MutableMapping[str, str]): - The map of transform name prefixes of the job - to be replaced to the corresponding name - prefixes of the new job. - client_request_id (str): - The client's unique identifier of the job, - re-used across retried attempts. If this field - is set, the service will ensure its uniqueness. - The request to create a job will fail if the - service has knowledge of a previously submitted - job with the same client's ID and job name. The - caller may use this field to ensure idempotence - of job creation across retried attempts to - create a job. By default, the field is empty - and, in that case, the service ignores it. - replaced_by_job_id (str): - If another job is an update of this job (and thus, this job - is in ``JOB_STATE_UPDATED``), this field contains the ID of - that job. 
- temp_files (MutableSequence[str]): - A set of files the system should be aware of - that are used for temporary storage. These - temporary files will be removed on job - completion. - No duplicates are allowed. - No file patterns are supported. - - The supported files are: - - Google Cloud Storage: - - storage.googleapis.com/{bucket}/{object} - bucket.storage.googleapis.com/{object} - labels (MutableMapping[str, str]): - User-defined labels for this job. - - The labels map can contain no more than 64 entries. Entries - of the labels map are UTF8 strings that comply with the - following restrictions: - - - Keys must conform to regexp: - [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - - Values must conform to regexp: - [\p{Ll}\p{Lo}\p{N}_-]{0,63} - - Both keys and values are additionally constrained to be - <= 128 bytes in size. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains this job. - pipeline_description (google.cloud.dataflow_v1beta3.types.PipelineDescription): - Preliminary field: The format of this data may change at any - time. A description of the user pipeline and stages through - which it is executed. Created by Cloud Dataflow service. - Only retrieved with JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL. - stage_states (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageState]): - This field may be mutated by the Cloud - Dataflow service; callers cannot mutate it. - job_metadata (google.cloud.dataflow_v1beta3.types.JobMetadata): - This field is populated by the Dataflow - service to support filtering jobs by the - metadata values provided here. Populated for - ListJobs and all GetJob views SUMMARY and - higher. - start_time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp when the job was started (transitioned to - JOB_STATE_PENDING). Flexible resource scheduling jobs are - started with some delay after job creation, so start_time is - unset before start and is updated when the job is started by - the Cloud Dataflow service. For other jobs, start_time - always equals to create_time and is immutable and set by the - Cloud Dataflow service. - created_from_snapshot_id (str): - If this is specified, the job's initial state - is populated from the given snapshot. - satisfies_pzs (bool): - Reserved for future use. This field is set - only in responses from the server; it is ignored - if it is set in any requests. 
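# NOTE (editorial example, not part of the patch): a sketch of the Job fields
# documented above, here phrased as an update that replaces an existing job;
# every value is made up.
from google.cloud.dataflow_v1beta3.types import Job, JobType

replacement = Job(
    project_id="my-project",
    name="streaming-pipeline",  # must match [a-z]([-a-z0-9]{0,1022}[a-z0-9])?
    type_=JobType.JOB_TYPE_STREAMING,
    replace_job_id="1234567890",  # ID of the job being replaced
    transform_name_mapping={"OldRead": "NewRead"},
    labels={"team": "data-eng"},
)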
- """ - - id: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - name: str = proto.Field( - proto.STRING, - number=3, - ) - type_: gd_environment.JobType = proto.Field( - proto.ENUM, - number=4, - enum=gd_environment.JobType, - ) - environment: gd_environment.Environment = proto.Field( - proto.MESSAGE, - number=5, - message=gd_environment.Environment, - ) - steps: MutableSequence['Step'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Step', - ) - steps_location: str = proto.Field( - proto.STRING, - number=24, - ) - current_state: 'JobState' = proto.Field( - proto.ENUM, - number=7, - enum='JobState', - ) - current_state_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - requested_state: 'JobState' = proto.Field( - proto.ENUM, - number=9, - enum='JobState', - ) - execution_info: 'JobExecutionInfo' = proto.Field( - proto.MESSAGE, - number=10, - message='JobExecutionInfo', - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - replace_job_id: str = proto.Field( - proto.STRING, - number=12, - ) - transform_name_mapping: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=13, - ) - client_request_id: str = proto.Field( - proto.STRING, - number=14, - ) - replaced_by_job_id: str = proto.Field( - proto.STRING, - number=15, - ) - temp_files: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=16, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=17, - ) - location: str = proto.Field( - proto.STRING, - number=18, - ) - pipeline_description: 'PipelineDescription' = proto.Field( - proto.MESSAGE, - number=19, - message='PipelineDescription', - ) - stage_states: MutableSequence['ExecutionStageState'] = proto.RepeatedField( - proto.MESSAGE, - number=20, - message='ExecutionStageState', - ) - job_metadata: 'JobMetadata' = proto.Field( - proto.MESSAGE, - number=21, - message='JobMetadata', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=22, - message=timestamp_pb2.Timestamp, - ) - created_from_snapshot_id: str = proto.Field( - proto.STRING, - number=23, - ) - satisfies_pzs: bool = proto.Field( - proto.BOOL, - number=25, - ) - - -class DatastoreIODetails(proto.Message): - r"""Metadata for a Datastore connector used by the job. - - Attributes: - namespace (str): - Namespace used in the connection. - project_id (str): - ProjectId accessed in the connection. - """ - - namespace: str = proto.Field( - proto.STRING, - number=1, - ) - project_id: str = proto.Field( - proto.STRING, - number=2, - ) - - -class PubSubIODetails(proto.Message): - r"""Metadata for a Pub/Sub connector used by the job. - - Attributes: - topic (str): - Topic accessed in the connection. - subscription (str): - Subscription used in the connection. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - subscription: str = proto.Field( - proto.STRING, - number=2, - ) - - -class FileIODetails(proto.Message): - r"""Metadata for a File connector used by the job. - - Attributes: - file_pattern (str): - File Pattern used to access files by the - connector. - """ - - file_pattern: str = proto.Field( - proto.STRING, - number=1, - ) - - -class BigTableIODetails(proto.Message): - r"""Metadata for a Cloud Bigtable connector used by the job. 
- - Attributes: - project_id (str): - ProjectId accessed in the connection. - instance_id (str): - InstanceId accessed in the connection. - table_id (str): - TableId accessed in the connection. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - table_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryIODetails(proto.Message): - r"""Metadata for a BigQuery connector used by the job. - - Attributes: - table (str): - Table accessed in the connection. - dataset (str): - Dataset accessed in the connection. - project_id (str): - Project accessed in the connection. - query (str): - Query used to access data in the connection. - """ - - table: str = proto.Field( - proto.STRING, - number=1, - ) - dataset: str = proto.Field( - proto.STRING, - number=2, - ) - project_id: str = proto.Field( - proto.STRING, - number=3, - ) - query: str = proto.Field( - proto.STRING, - number=4, - ) - - -class SpannerIODetails(proto.Message): - r"""Metadata for a Spanner connector used by the job. - - Attributes: - project_id (str): - ProjectId accessed in the connection. - instance_id (str): - InstanceId accessed in the connection. - database_id (str): - DatabaseId accessed in the connection. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - instance_id: str = proto.Field( - proto.STRING, - number=2, - ) - database_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class SdkVersion(proto.Message): - r"""The version of the SDK used to run the job. - - Attributes: - version (str): - The version of the SDK used to run the job. - version_display_name (str): - A readable string describing the version of - the SDK. - sdk_support_status (google.cloud.dataflow_v1beta3.types.SdkVersion.SdkSupportStatus): - The support status for this SDK version. - """ - class SdkSupportStatus(proto.Enum): - r"""The support status of the SDK used to run the job.""" - UNKNOWN = 0 - SUPPORTED = 1 - STALE = 2 - DEPRECATED = 3 - UNSUPPORTED = 4 - - version: str = proto.Field( - proto.STRING, - number=1, - ) - version_display_name: str = proto.Field( - proto.STRING, - number=2, - ) - sdk_support_status: SdkSupportStatus = proto.Field( - proto.ENUM, - number=3, - enum=SdkSupportStatus, - ) - - -class JobMetadata(proto.Message): - r"""Metadata available primarily for filtering jobs. Will be - included in the ListJob response and Job SUMMARY view. - - Attributes: - sdk_version (google.cloud.dataflow_v1beta3.types.SdkVersion): - The SDK version used to run the job. - spanner_details (MutableSequence[google.cloud.dataflow_v1beta3.types.SpannerIODetails]): - Identification of a Spanner source used in - the Dataflow job. - bigquery_details (MutableSequence[google.cloud.dataflow_v1beta3.types.BigQueryIODetails]): - Identification of a BigQuery source used in - the Dataflow job. - big_table_details (MutableSequence[google.cloud.dataflow_v1beta3.types.BigTableIODetails]): - Identification of a Cloud Bigtable source - used in the Dataflow job. - pubsub_details (MutableSequence[google.cloud.dataflow_v1beta3.types.PubSubIODetails]): - Identification of a Pub/Sub source used in - the Dataflow job. - file_details (MutableSequence[google.cloud.dataflow_v1beta3.types.FileIODetails]): - Identification of a File source used in the - Dataflow job. - datastore_details (MutableSequence[google.cloud.dataflow_v1beta3.types.DatastoreIODetails]): - Identification of a Datastore source used in - the Dataflow job. 
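# NOTE (editorial example, not part of the patch): JobMetadata aggregates the
# per-connector detail messages defined above. A sketch with invented values;
# note that SdkSupportStatus is nested under SdkVersion.
from google.cloud.dataflow_v1beta3.types import BigQueryIODetails, JobMetadata, SdkVersion

metadata = JobMetadata(
    sdk_version=SdkVersion(
        version="2.41.0",
        sdk_support_status=SdkVersion.SdkSupportStatus.SUPPORTED,
    ),
    bigquery_details=[
        BigQueryIODetails(project_id="my-project", dataset="logs", table="events"),
    ],
)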
- """ - - sdk_version: 'SdkVersion' = proto.Field( - proto.MESSAGE, - number=1, - message='SdkVersion', - ) - spanner_details: MutableSequence['SpannerIODetails'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='SpannerIODetails', - ) - bigquery_details: MutableSequence['BigQueryIODetails'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='BigQueryIODetails', - ) - big_table_details: MutableSequence['BigTableIODetails'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='BigTableIODetails', - ) - pubsub_details: MutableSequence['PubSubIODetails'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='PubSubIODetails', - ) - file_details: MutableSequence['FileIODetails'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='FileIODetails', - ) - datastore_details: MutableSequence['DatastoreIODetails'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='DatastoreIODetails', - ) - - -class ExecutionStageState(proto.Message): - r"""A message describing the state of a particular execution - stage. - - Attributes: - execution_stage_name (str): - The name of the execution stage. - execution_stage_state (google.cloud.dataflow_v1beta3.types.JobState): - Executions stage states allow the same set of - values as JobState. - current_state_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the stage transitioned to - this state. - """ - - execution_stage_name: str = proto.Field( - proto.STRING, - number=1, - ) - execution_stage_state: 'JobState' = proto.Field( - proto.ENUM, - number=2, - enum='JobState', - ) - current_state_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - - -class PipelineDescription(proto.Message): - r"""A descriptive representation of submitted pipeline as well as - the executed form. This data is provided by the Dataflow - service for ease of visualizing the pipeline and interpreting - Dataflow provided metrics. - - Attributes: - original_pipeline_transform (MutableSequence[google.cloud.dataflow_v1beta3.types.TransformSummary]): - Description of each transform in the pipeline - and collections between them. - execution_pipeline_stage (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary]): - Description of each stage of execution of the - pipeline. - display_data (MutableSequence[google.cloud.dataflow_v1beta3.types.DisplayData]): - Pipeline level display data. - """ - - original_pipeline_transform: MutableSequence['TransformSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='TransformSummary', - ) - execution_pipeline_stage: MutableSequence['ExecutionStageSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ExecutionStageSummary', - ) - display_data: MutableSequence['DisplayData'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='DisplayData', - ) - - -class TransformSummary(proto.Message): - r"""Description of the type, names/ids, and input/outputs for a - transform. - - Attributes: - kind (google.cloud.dataflow_v1beta3.types.KindType): - Type of transform. - id (str): - SDK generated id of this transform instance. - name (str): - User provided name for this transform - instance. - display_data (MutableSequence[google.cloud.dataflow_v1beta3.types.DisplayData]): - Transform-specific display data. - output_collection_name (MutableSequence[str]): - User names for all collection outputs to - this transform. 
- input_collection_name (MutableSequence[str]): - User names for all collection inputs to this - transform. - """ - - kind: 'KindType' = proto.Field( - proto.ENUM, - number=1, - enum='KindType', - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - name: str = proto.Field( - proto.STRING, - number=3, - ) - display_data: MutableSequence['DisplayData'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='DisplayData', - ) - output_collection_name: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - input_collection_name: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=6, - ) - - -class ExecutionStageSummary(proto.Message): - r"""Description of the composing transforms, names/ids, and - input/outputs of a stage of execution. Some composing - transforms and sources may have been generated by the Dataflow - service during execution planning. - - Attributes: - name (str): - Dataflow service generated name for this - stage. - id (str): - Dataflow service generated id for this stage. - kind (google.cloud.dataflow_v1beta3.types.KindType): - Type of transform this stage is executing. - input_source (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.StageSource]): - Input sources for this stage. - output_source (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.StageSource]): - Output sources for this stage. - prerequisite_stage (MutableSequence[str]): - Other stages that must complete before this - stage can run. - component_transform (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.ComponentTransform]): - Transforms that comprise this execution - stage. - component_source (MutableSequence[google.cloud.dataflow_v1beta3.types.ExecutionStageSummary.ComponentSource]): - Collections produced and consumed by - component transforms of this stage. - """ - - class StageSource(proto.Message): - r"""Description of an input or output of an execution stage. - - Attributes: - user_name (str): - Human-readable name for this source; may be - user or system generated. - name (str): - Dataflow service generated name for this - source. - original_transform_or_collection (str): - User name for the original user transform or - collection with which this source is most - closely associated. - size_bytes (int): - Size of the source, if measurable. - """ - - user_name: str = proto.Field( - proto.STRING, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - original_transform_or_collection: str = proto.Field( - proto.STRING, - number=3, - ) - size_bytes: int = proto.Field( - proto.INT64, - number=4, - ) - - class ComponentTransform(proto.Message): - r"""Description of a transform executed as part of an execution - stage. - - Attributes: - user_name (str): - Human-readable name for this transform; may - be user or system generated. - name (str): - Dataflow service generated name for this - source. - original_transform (str): - User name for the original user transform - with which this transform is most closely - associated. - """ - - user_name: str = proto.Field( - proto.STRING, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - original_transform: str = proto.Field( - proto.STRING, - number=3, - ) - - class ComponentSource(proto.Message): - r"""Description of an interstitial value between transforms in an - execution stage. - - Attributes: - user_name (str): - Human-readable name for this transform; may - be user or system generated. 
- name (str): - Dataflow service generated name for this - source. - original_transform_or_collection (str): - User name for the original user transform or - collection with which this source is most - closely associated. - """ - - user_name: str = proto.Field( - proto.STRING, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - original_transform_or_collection: str = proto.Field( - proto.STRING, - number=3, - ) - - name: str = proto.Field( - proto.STRING, - number=1, - ) - id: str = proto.Field( - proto.STRING, - number=2, - ) - kind: 'KindType' = proto.Field( - proto.ENUM, - number=3, - enum='KindType', - ) - input_source: MutableSequence[StageSource] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=StageSource, - ) - output_source: MutableSequence[StageSource] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=StageSource, - ) - prerequisite_stage: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) - component_transform: MutableSequence[ComponentTransform] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message=ComponentTransform, - ) - component_source: MutableSequence[ComponentSource] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=ComponentSource, - ) - - -class DisplayData(proto.Message): - r"""Data provided with a pipeline or transform to provide - descriptive info. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - key (str): - The key identifying the display data. - This is intended to be used as a label for the - display data when viewed in a dax monitoring - system. - namespace (str): - The namespace for the key. This is usually a - class name or programming language namespace - (i.e. python module) which defines the display - data. This allows a dax monitoring system to - specially handle the data and perform custom - rendering. - str_value (str): - Contains value if the data is of string type. - - This field is a member of `oneof`_ ``Value``. - int64_value (int): - Contains value if the data is of int64 type. - - This field is a member of `oneof`_ ``Value``. - float_value (float): - Contains value if the data is of float type. - - This field is a member of `oneof`_ ``Value``. - java_class_value (str): - Contains value if the data is of java class - type. - - This field is a member of `oneof`_ ``Value``. - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - Contains value if the data is of timestamp - type. - - This field is a member of `oneof`_ ``Value``. - duration_value (google.protobuf.duration_pb2.Duration): - Contains value if the data is of duration - type. - - This field is a member of `oneof`_ ``Value``. - bool_value (bool): - Contains value if the data is of a boolean - type. - - This field is a member of `oneof`_ ``Value``. - short_str_value (str): - A possible additional shorter value to display. For example - a java_class_name_value of com.mypackage.MyDoFn will be - stored with MyDoFn as the short_str_value and - com.mypackage.MyDoFn as the java_class_name value. - short_str_value can be displayed and java_class_name_value - will be displayed as a tooltip. - url (str): - An optional full URL. - label (str): - An optional label to display in a dax UI for - the element. 
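# NOTE (editorial example, not part of the patch): DisplayData's value fields
# form a proto oneof, so assigning one member clears the others; a short
# sketch of that behavior under proto-plus:
from google.cloud.dataflow_v1beta3.types import DisplayData

dd = DisplayData(key="inputFile", namespace="my.module", str_value="gs://bucket/in.txt")
dd.int64_value = 42       # switching the oneof member...
assert not dd.str_value   # ...clears the previously set str_value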
- """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - namespace: str = proto.Field( - proto.STRING, - number=2, - ) - str_value: str = proto.Field( - proto.STRING, - number=4, - oneof='Value', - ) - int64_value: int = proto.Field( - proto.INT64, - number=5, - oneof='Value', - ) - float_value: float = proto.Field( - proto.FLOAT, - number=6, - oneof='Value', - ) - java_class_value: str = proto.Field( - proto.STRING, - number=7, - oneof='Value', - ) - timestamp_value: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - oneof='Value', - message=timestamp_pb2.Timestamp, - ) - duration_value: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=9, - oneof='Value', - message=duration_pb2.Duration, - ) - bool_value: bool = proto.Field( - proto.BOOL, - number=10, - oneof='Value', - ) - short_str_value: str = proto.Field( - proto.STRING, - number=11, - ) - url: str = proto.Field( - proto.STRING, - number=12, - ) - label: str = proto.Field( - proto.STRING, - number=13, - ) - - -class Step(proto.Message): - r"""Defines a particular step within a Cloud Dataflow job. - - A job consists of multiple steps, each of which performs some - specific operation as part of the overall job. Data is typically - passed from one step to another as part of the job. - - Here's an example of a sequence of steps which together implement a - Map-Reduce job: - - - Read a collection of data from some source, parsing the - collection's elements. - - - Validate the elements. - - - Apply a user-defined function to map each element to some value - and extract an element-specific key value. - - - Group elements with the same key into a single element with that - key, transforming a multiply-keyed collection into a - uniquely-keyed collection. - - - Write the elements out to some data sink. - - Note that the Cloud Dataflow service may be used to run many - different types of jobs, not just Map-Reduce. - - Attributes: - kind (str): - The kind of step in the Cloud Dataflow job. - name (str): - The name that identifies the step. This must - be unique for each step with respect to all - other steps in the Cloud Dataflow job. - properties (google.protobuf.struct_pb2.Struct): - Named properties associated with the step. Each kind of - predefined step has its own required set of properties. Must - be provided on Create. Only retrieved with JOB_VIEW_ALL. - """ - - kind: str = proto.Field( - proto.STRING, - number=1, - ) - name: str = proto.Field( - proto.STRING, - number=2, - ) - properties: struct_pb2.Struct = proto.Field( - proto.MESSAGE, - number=3, - message=struct_pb2.Struct, - ) - - -class JobExecutionInfo(proto.Message): - r"""Additional information about how a Cloud Dataflow job will be - executed that isn't contained in the submitted job. - - Attributes: - stages (MutableMapping[str, google.cloud.dataflow_v1beta3.types.JobExecutionStageInfo]): - A mapping from each stage to the information - about that stage. - """ - - stages: MutableMapping[str, 'JobExecutionStageInfo'] = proto.MapField( - proto.STRING, - proto.MESSAGE, - number=1, - message='JobExecutionStageInfo', - ) - - -class JobExecutionStageInfo(proto.Message): - r"""Contains information about how a particular - [google.dataflow.v1beta3.Step][google.dataflow.v1beta3.Step] will be - executed. - - Attributes: - step_name (MutableSequence[str]): - The steps associated with the execution - stage. Note that stages may have several steps, - and that a given step might be run by more than - one stage. 
- """ - - step_name: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class CreateJobRequest(proto.Message): - r"""Request to create a Cloud Dataflow job. - - Attributes: - project_id (str): - The ID of the Cloud Platform project that the - job belongs to. - job (google.cloud.dataflow_v1beta3.types.Job): - The job to create. - view (google.cloud.dataflow_v1beta3.types.JobView): - The level of information requested in - response. - replace_job_id (str): - Deprecated. This field is now in the Job - message. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains this job. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job: 'Job' = proto.Field( - proto.MESSAGE, - number=2, - message='Job', - ) - view: 'JobView' = proto.Field( - proto.ENUM, - number=3, - enum='JobView', - ) - replace_job_id: str = proto.Field( - proto.STRING, - number=4, - ) - location: str = proto.Field( - proto.STRING, - number=5, - ) - - -class GetJobRequest(proto.Message): - r"""Request to get the state of a Cloud Dataflow job. - - Attributes: - project_id (str): - The ID of the Cloud Platform project that the - job belongs to. - job_id (str): - The job ID. - view (google.cloud.dataflow_v1beta3.types.JobView): - The level of information requested in - response. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains this job. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - view: 'JobView' = proto.Field( - proto.ENUM, - number=3, - enum='JobView', - ) - location: str = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateJobRequest(proto.Message): - r"""Request to update a Cloud Dataflow job. - - Attributes: - project_id (str): - The ID of the Cloud Platform project that the - job belongs to. - job_id (str): - The job ID. - job (google.cloud.dataflow_v1beta3.types.Job): - The updated job. - Only the job state is updatable; other fields - will be ignored. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains this job. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - job: 'Job' = proto.Field( - proto.MESSAGE, - number=3, - message='Job', - ) - location: str = proto.Field( - proto.STRING, - number=4, - ) - - -class ListJobsRequest(proto.Message): - r"""Request to list Cloud Dataflow jobs. - - Attributes: - filter (google.cloud.dataflow_v1beta3.types.ListJobsRequest.Filter): - The kind of filter to use. - project_id (str): - The project which owns the jobs. - view (google.cloud.dataflow_v1beta3.types.JobView): - Deprecated. ListJobs always returns summaries - now. Use GetJob for other JobViews. - page_size (int): - If there are many jobs, limit response to at most this many. - The actual number of jobs returned will be the lesser of - max_responses and an unspecified server-defined limit. - page_token (str): - Set this to the 'next_page_token' field of a previous - response to request additional results in a long list. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains this job. - """ - class Filter(proto.Enum): - r"""This field filters out and returns jobs in the specified job - state. 
The order of data returned is determined by the filter
-        used, and is subject to change.
-        """
-        UNKNOWN = 0
-        ALL = 1
-        TERMINATED = 2
-        ACTIVE = 3
-
-    filter: Filter = proto.Field(
-        proto.ENUM,
-        number=5,
-        enum=Filter,
-    )
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    view: 'JobView' = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum='JobView',
-    )
-    page_size: int = proto.Field(
-        proto.INT32,
-        number=3,
-    )
-    page_token: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    location: str = proto.Field(
-        proto.STRING,
-        number=17,
-    )
-
-
-class FailedLocation(proto.Message):
-    r"""Indicates which [regional endpoint]
-    (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
-    failed to respond to a request for data.
-
-    Attributes:
-        name (str):
-            The name of the [regional endpoint]
-            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
-            that failed to respond.
-    """
-
-    name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class ListJobsResponse(proto.Message):
-    r"""Response to a request to list Cloud Dataflow jobs in a
-    project. This might be a partial response, depending on the page
-    size in the ListJobsRequest. However, if the project does not
-    have any jobs, an instance of ListJobsResponse is not returned
-    and the request's response body is empty, {}.
-
-    Attributes:
-        jobs (MutableSequence[google.cloud.dataflow_v1beta3.types.Job]):
-            A subset of the requested job information.
-        next_page_token (str):
-            Set if there may be more results than fit in
-            this response.
-        failed_location (MutableSequence[google.cloud.dataflow_v1beta3.types.FailedLocation]):
-            Zero or more messages describing the [regional endpoints]
-            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
-            that failed to respond.
-    """
-
-    @property
-    def raw_page(self):
-        return self
-
-    jobs: MutableSequence['Job'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='Job',
-    )
-    next_page_token: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    failed_location: MutableSequence['FailedLocation'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message='FailedLocation',
-    )
-
-
-class SnapshotJobRequest(proto.Message):
-    r"""Request to create a snapshot of a job.
-
-    Attributes:
-        project_id (str):
-            The project which owns the job to be
-            snapshotted.
-        job_id (str):
-            The job to be snapshotted.
-        ttl (google.protobuf.duration_pb2.Duration):
-            TTL for the snapshot.
-        location (str):
-            The location that contains this job.
-        snapshot_sources (bool):
-            If true, perform snapshots for sources which
-            support this.
-        description (str):
-            User-specified description of the snapshot.
-            May be empty.
-    """
-
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    job_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    ttl: duration_pb2.Duration = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=duration_pb2.Duration,
-    )
-    location: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    snapshot_sources: bool = proto.Field(
-        proto.BOOL,
-        number=5,
-    )
-    description: str = proto.Field(
-        proto.STRING,
-        number=6,
-    )
-
-
-class CheckActiveJobsRequest(proto.Message):
-    r"""Request to check whether active jobs exist for a project.
-
-    Attributes:
-        project_id (str):
-            The project which owns the jobs.
-    """
-
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class CheckActiveJobsResponse(proto.Message):
-    r"""Response for CheckActiveJobsRequest.
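-
-    Example (editor's illustrative sketch, not generated content;
-    it assumes the ``JobsV1Beta3Client`` exported by this package
-    and a placeholder project ID)::
-
-        from google.cloud import dataflow_v1beta3
-
-        # Ask the service whether the project currently has active jobs.
-        client = dataflow_v1beta3.JobsV1Beta3Client()
-        response = client.check_active_jobs(
-            dataflow_v1beta3.CheckActiveJobsRequest(project_id="my-project")
-        )
-        print(response.active_jobs_exist)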
-
-    Attributes:
-        active_jobs_exist (bool):
-            If True, active jobs exist for the project;
-            False otherwise.
-    """
-
-    active_jobs_exist: bool = proto.Field(
-        proto.BOOL,
-        number=1,
-    )
-
-
-__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py
deleted file mode 100644
index ac4db6d..0000000
--- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/messages.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import struct_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.dataflow.v1beta3',
-    manifest={
-        'JobMessageImportance',
-        'JobMessage',
-        'StructuredMessage',
-        'AutoscalingEvent',
-        'ListJobMessagesRequest',
-        'ListJobMessagesResponse',
-    },
-)
-
-
-class JobMessageImportance(proto.Enum):
-    r"""Indicates the importance of the message."""
-    JOB_MESSAGE_IMPORTANCE_UNKNOWN = 0
-    JOB_MESSAGE_DEBUG = 1
-    JOB_MESSAGE_DETAILED = 2
-    JOB_MESSAGE_BASIC = 5
-    JOB_MESSAGE_WARNING = 3
-    JOB_MESSAGE_ERROR = 4
-
-
-class JobMessage(proto.Message):
-    r"""A particular message pertaining to a Dataflow job.
-
-    Attributes:
-        id (str):
-            Deprecated.
-        time (google.protobuf.timestamp_pb2.Timestamp):
-            The timestamp of the message.
-        message_text (str):
-            The text of the message.
-        message_importance (google.cloud.dataflow_v1beta3.types.JobMessageImportance):
-            Importance level of the message.
-    """
-
-    id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message=timestamp_pb2.Timestamp,
-    )
-    message_text: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    message_importance: 'JobMessageImportance' = proto.Field(
-        proto.ENUM,
-        number=4,
-        enum='JobMessageImportance',
-    )
-
-
-class StructuredMessage(proto.Message):
-    r"""A rich message format, including a human readable string, a
-    key for identifying the message, and structured data associated
-    with the message for programmatic consumption.
-
-    Attributes:
-        message_text (str):
-            Human-readable version of message.
-        message_key (str):
-            Identifier for this message type. Used by
-            external systems to internationalize or
-            personalize the message.
-        parameters (MutableSequence[google.cloud.dataflow_v1beta3.types.StructuredMessage.Parameter]):
-            The structured data associated with this
-            message.
-    """
-
-    class Parameter(proto.Message):
-        r"""Structured data associated with this message.
-
-        Attributes:
-            key (str):
-                Key or name for this parameter.
-            value (google.protobuf.struct_pb2.Value):
-                Value for this parameter.
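-
-        Example (editor's illustrative sketch; the key and value
-        shown are placeholders, not an API contract)::
-
-            from google.protobuf import struct_pb2
-
-            # proto-plus messages accept keyword construction.
-            param = StructuredMessage.Parameter(
-                key="jobName",
-                value=struct_pb2.Value(string_value="example-job"),
-            )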
-        """
-
-        key: str = proto.Field(
-            proto.STRING,
-            number=1,
-        )
-        value: struct_pb2.Value = proto.Field(
-            proto.MESSAGE,
-            number=2,
-            message=struct_pb2.Value,
-        )
-
-    message_text: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    message_key: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    parameters: MutableSequence[Parameter] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message=Parameter,
-    )
-
-
-class AutoscalingEvent(proto.Message):
-    r"""A structured message reporting an autoscaling decision made
-    by the Dataflow service.
-
-    Attributes:
-        current_num_workers (int):
-            The current number of workers the job has.
-        target_num_workers (int):
-            The target number of workers the worker pool
-            wants to resize to use.
-        event_type (google.cloud.dataflow_v1beta3.types.AutoscalingEvent.AutoscalingEventType):
-            The type of autoscaling event to report.
-        description (google.cloud.dataflow_v1beta3.types.StructuredMessage):
-            A message describing why the system decided
-            to adjust the current number of workers, why it
-            failed, or why the system decided to not make
-            any changes to the number of workers.
-        time (google.protobuf.timestamp_pb2.Timestamp):
-            The time this event was emitted to indicate a new target or
-            current num_workers value.
-        worker_pool (str):
-            A short and friendly name for the worker pool
-            this event refers to.
-    """
-    class AutoscalingEventType(proto.Enum):
-        r"""Indicates the type of autoscaling event."""
-        TYPE_UNKNOWN = 0
-        TARGET_NUM_WORKERS_CHANGED = 1
-        CURRENT_NUM_WORKERS_CHANGED = 2
-        ACTUATION_FAILURE = 3
-        NO_CHANGE = 4
-
-    current_num_workers: int = proto.Field(
-        proto.INT64,
-        number=1,
-    )
-    target_num_workers: int = proto.Field(
-        proto.INT64,
-        number=2,
-    )
-    event_type: AutoscalingEventType = proto.Field(
-        proto.ENUM,
-        number=3,
-        enum=AutoscalingEventType,
-    )
-    description: 'StructuredMessage' = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message='StructuredMessage',
-    )
-    time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=timestamp_pb2.Timestamp,
-    )
-    worker_pool: str = proto.Field(
-        proto.STRING,
-        number=7,
-    )
-
-
-class ListJobMessagesRequest(proto.Message):
-    r"""Request to list job messages. Up to ``page_size`` messages will
-    be returned in the time range specified, starting with the oldest
-    messages first. If no time range is specified, the results will
-    start with the oldest message.
-
-    Attributes:
-        project_id (str):
-            A project id.
-        job_id (str):
-            The job to get messages about.
-        minimum_importance (google.cloud.dataflow_v1beta3.types.JobMessageImportance):
-            Filter to only get messages with importance
-            >= level.
-        page_size (int):
-            If specified, determines the maximum number
-            of messages to return. If unspecified, the
-            service may choose an appropriate default, or
-            may return an arbitrarily large number of
-            results.
-        page_token (str):
-            If supplied, this should be the value of next_page_token
-            returned by an earlier call. This will cause the next page
-            of results to be returned.
-        start_time (google.protobuf.timestamp_pb2.Timestamp):
-            If specified, return only messages with timestamps >=
-            start_time. The default is the job creation time (i.e.
-            beginning of messages).
-        end_time (google.protobuf.timestamp_pb2.Timestamp):
-            Return only messages with timestamps < end_time. The default
-            is now (i.e. return up to the latest messages available).
- location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains the job specified by job_id. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - minimum_importance: 'JobMessageImportance' = proto.Field( - proto.ENUM, - number=3, - enum='JobMessageImportance', - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - location: str = proto.Field( - proto.STRING, - number=8, - ) - - -class ListJobMessagesResponse(proto.Message): - r"""Response to a request to list job messages. - - Attributes: - job_messages (MutableSequence[google.cloud.dataflow_v1beta3.types.JobMessage]): - Messages in ascending timestamp order. - next_page_token (str): - The token to obtain the next page of results - if there are more. - autoscaling_events (MutableSequence[google.cloud.dataflow_v1beta3.types.AutoscalingEvent]): - Autoscaling events in ascending timestamp - order. - """ - - @property - def raw_page(self): - return self - - job_messages: MutableSequence['JobMessage'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='JobMessage', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - autoscaling_events: MutableSequence['AutoscalingEvent'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='AutoscalingEvent', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py deleted file mode 100644 index b2aaa9b..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/metrics.py +++ /dev/null @@ -1,619 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import struct_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
-    package='google.dataflow.v1beta3',
-    manifest={
-        'ExecutionState',
-        'MetricStructuredName',
-        'MetricUpdate',
-        'GetJobMetricsRequest',
-        'JobMetrics',
-        'GetJobExecutionDetailsRequest',
-        'ProgressTimeseries',
-        'StageSummary',
-        'JobExecutionDetails',
-        'GetStageExecutionDetailsRequest',
-        'WorkItemDetails',
-        'WorkerDetails',
-        'StageExecutionDetails',
-    },
-)
-
-
-class ExecutionState(proto.Enum):
-    r"""The state of some component of job execution."""
-    EXECUTION_STATE_UNKNOWN = 0
-    EXECUTION_STATE_NOT_STARTED = 1
-    EXECUTION_STATE_RUNNING = 2
-    EXECUTION_STATE_SUCCEEDED = 3
-    EXECUTION_STATE_FAILED = 4
-    EXECUTION_STATE_CANCELLED = 5
-
-
-class MetricStructuredName(proto.Message):
-    r"""Identifies a metric, by describing the source which generated
-    the metric.
-
-    Attributes:
-        origin (str):
-            Origin (namespace) of metric name. May be
-            blank for user-defined metrics; will be
-            "dataflow" for metrics defined by the Dataflow
-            service or SDK.
-        name (str):
-            Worker-defined metric name.
-        context (MutableMapping[str, str]):
-            Zero or more labeled fields which identify the part of the
-            job this metric is associated with, such as the name of a
-            step or collection.
-
-            For example, built-in counters associated with steps will
-            have context['step'] = <step-name>. Counters associated
-            with PCollections in the SDK will have
-            context['pcollection'] = <pcollection-name>.
-    """
-
-    origin: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    name: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    context: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=3,
-    )
-
-
-class MetricUpdate(proto.Message):
-    r"""Describes the state of a metric.
-
-    Attributes:
-        name (google.cloud.dataflow_v1beta3.types.MetricStructuredName):
-            Name of the metric.
-        kind (str):
-            Metric aggregation kind. The possible metric
-            aggregation kinds are "Sum", "Max", "Min",
-            "Mean", "Set", "And", "Or", and "Distribution".
-            The specified aggregation kind is
-            case-insensitive.
-            If omitted, this is not an aggregated value but
-            instead a single metric sample value.
-        cumulative (bool):
-            True if this metric is reported as the total
-            cumulative aggregate value accumulated since the
-            worker started working on this WorkItem. By
-            default this is false, indicating that this
-            metric is reported as a delta that is not
-            associated with any WorkItem.
-        scalar (google.protobuf.struct_pb2.Value):
-            Worker-computed aggregate value for
-            aggregation kinds "Sum", "Max", "Min", "And",
-            and "Or". The possible value types are Long,
-            Double, and Boolean.
-        mean_sum (google.protobuf.struct_pb2.Value):
-            Worker-computed aggregate value for the "Mean" aggregation
-            kind. This holds the sum of the aggregated values and is
-            used in combination with mean_count below to obtain the
-            actual mean aggregate value. The only possible value types
-            are Long and Double.
-        mean_count (google.protobuf.struct_pb2.Value):
-            Worker-computed aggregate value for the "Mean" aggregation
-            kind. This holds the count of the aggregated values and is
-            used in combination with mean_sum above to obtain the actual
-            mean aggregate value. The only possible value type is Long.
-        set (google.protobuf.struct_pb2.Value):
-            Worker-computed aggregate value for the "Set"
-            aggregation kind.
The only possible value type
-            is a list of Values whose type can be Long,
-            Double, or String, according to the metric's
-            type. All Values in the list must be of the
-            same type.
-        distribution (google.protobuf.struct_pb2.Value):
-            A struct value describing properties of a
-            distribution of numeric values.
-        gauge (google.protobuf.struct_pb2.Value):
-            A struct value describing properties of a
-            Gauge. Metrics of gauge type show the value of a
-            metric across time, and are aggregated based on
-            the newest value.
-        internal (google.protobuf.struct_pb2.Value):
-            Worker-computed aggregate value for internal
-            use by the Dataflow service.
-        update_time (google.protobuf.timestamp_pb2.Timestamp):
-            Timestamp associated with the metric value.
-            Optional when workers are reporting work
-            progress; it will be filled in responses from
-            the metrics API.
-    """
-
-    name: 'MetricStructuredName' = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='MetricStructuredName',
-    )
-    kind: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    cumulative: bool = proto.Field(
-        proto.BOOL,
-        number=3,
-    )
-    scalar: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=struct_pb2.Value,
-    )
-    mean_sum: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=struct_pb2.Value,
-    )
-    mean_count: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=struct_pb2.Value,
-    )
-    set: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message=struct_pb2.Value,
-    )
-    distribution: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=11,
-        message=struct_pb2.Value,
-    )
-    gauge: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=12,
-        message=struct_pb2.Value,
-    )
-    internal: struct_pb2.Value = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        message=struct_pb2.Value,
-    )
-    update_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=9,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class GetJobMetricsRequest(proto.Message):
-    r"""Request to get job metrics.
-
-    Attributes:
-        project_id (str):
-            A project id.
-        job_id (str):
-            The job to get metrics for.
-        start_time (google.protobuf.timestamp_pb2.Timestamp):
-            Return only metric data that has changed
-            since this time. Default is to return all
-            information about all metrics for the job.
-        location (str):
-            The [regional endpoint]
-            (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints)
-            that contains the job specified by job_id.
-    """
-
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    job_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    start_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-    location: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-class JobMetrics(proto.Message):
-    r"""JobMetrics contains a collection of metrics describing the
-    detailed progress of a Dataflow job. Metrics correspond to
-    user-defined and system-defined metrics in the job.
-
-    This resource captures only the most recent values of each
-    metric; time-series data can be queried for them (under the same
-    metric names) from Cloud Monitoring.
-
-    Attributes:
-        metric_time (google.protobuf.timestamp_pb2.Timestamp):
-            Timestamp as of which metric values are
-            current.
-        metrics (MutableSequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]):
-            All metrics for this job.
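-
-    Example (editor's illustrative sketch, not generated content;
-    it assumes the ``MetricsV1Beta3Client`` exported by this package
-    and placeholder identifiers)::
-
-        from google.cloud import dataflow_v1beta3
-
-        # Fetch the most recent metric values for a job and print them.
-        client = dataflow_v1beta3.MetricsV1Beta3Client()
-        job_metrics = client.get_job_metrics(
-            dataflow_v1beta3.GetJobMetricsRequest(
-                project_id="my-project",
-                job_id="my-job-id",
-            )
-        )
-        for update in job_metrics.metrics:
-            print(update.name.name, update.kind)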
- """ - - metric_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - metrics: MutableSequence['MetricUpdate'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='MetricUpdate', - ) - - -class GetJobExecutionDetailsRequest(proto.Message): - r"""Request to get job execution details. - - Attributes: - project_id (str): - A project id. - job_id (str): - The job to get execution details for. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains the job specified by job_id. - page_size (int): - If specified, determines the maximum number - of stages to return. If unspecified, the - service may choose an appropriate default, or - may return an arbitrarily large number of - results. - page_token (str): - If supplied, this should be the value of next_page_token - returned by an earlier call. This will cause the next page - of results to be returned. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - page_size: int = proto.Field( - proto.INT32, - number=4, - ) - page_token: str = proto.Field( - proto.STRING, - number=5, - ) - - -class ProgressTimeseries(proto.Message): - r"""Information about the progress of some component of job - execution. - - Attributes: - current_progress (float): - The current progress of the component, in the range [0,1]. - data_points (MutableSequence[google.cloud.dataflow_v1beta3.types.ProgressTimeseries.Point]): - History of progress for the component. - Points are sorted by time. - """ - - class Point(proto.Message): - r"""A point in the timeseries. - - Attributes: - time (google.protobuf.timestamp_pb2.Timestamp): - The timestamp of the point. - value (float): - The value of the point. - """ - - time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - value: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - current_progress: float = proto.Field( - proto.DOUBLE, - number=1, - ) - data_points: MutableSequence[Point] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Point, - ) - - -class StageSummary(proto.Message): - r"""Information about a particular execution stage of a job. - - Attributes: - stage_id (str): - ID of this stage - state (google.cloud.dataflow_v1beta3.types.ExecutionState): - State of this stage. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start time of this stage. - end_time (google.protobuf.timestamp_pb2.Timestamp): - End time of this stage. - If the work item is completed, this is the - actual end time of the stage. Otherwise, it is - the predicted end time. - progress (google.cloud.dataflow_v1beta3.types.ProgressTimeseries): - Progress for this stage. - Only applicable to Batch jobs. - metrics (MutableSequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): - Metrics for this stage. 
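-
-    Example (editor's illustrative sketch; ``summary`` stands for a
-    ``StageSummary`` taken from a ``JobExecutionDetails`` page)::
-
-        # Report progress only for stages that are still running.
-        if summary.state == ExecutionState.EXECUTION_STATE_RUNNING:
-            print(summary.stage_id, summary.progress.current_progress)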
- """ - - stage_id: str = proto.Field( - proto.STRING, - number=1, - ) - state: 'ExecutionState' = proto.Field( - proto.ENUM, - number=2, - enum='ExecutionState', - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - progress: 'ProgressTimeseries' = proto.Field( - proto.MESSAGE, - number=5, - message='ProgressTimeseries', - ) - metrics: MutableSequence['MetricUpdate'] = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='MetricUpdate', - ) - - -class JobExecutionDetails(proto.Message): - r"""Information about the execution of a job. - - Attributes: - stages (MutableSequence[google.cloud.dataflow_v1beta3.types.StageSummary]): - The stages of the job execution. - next_page_token (str): - If present, this response does not contain all requested - tasks. To obtain the next page of results, repeat the - request with page_token set to this value. - """ - - @property - def raw_page(self): - return self - - stages: MutableSequence['StageSummary'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='StageSummary', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -class GetStageExecutionDetailsRequest(proto.Message): - r"""Request to get information about a particular execution stage - of a job. Currently only tracked for Batch jobs. - - Attributes: - project_id (str): - A project id. - job_id (str): - The job to get execution details for. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - that contains the job specified by job_id. - stage_id (str): - The stage for which to fetch information. - page_size (int): - If specified, determines the maximum number - of work items to return. If unspecified, the - service may choose an appropriate default, or - may return an arbitrarily large number of - results. - page_token (str): - If supplied, this should be the value of next_page_token - returned by an earlier call. This will cause the next page - of results to be returned. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Lower time bound of work items to include, by - start time. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Upper time bound of work items to include, by - start time. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=2, - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - stage_id: str = proto.Field( - proto.STRING, - number=4, - ) - page_size: int = proto.Field( - proto.INT32, - number=5, - ) - page_token: str = proto.Field( - proto.STRING, - number=6, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - - -class WorkItemDetails(proto.Message): - r"""Information about an individual work item execution. - - Attributes: - task_id (str): - Name of this work item. - attempt_id (str): - Attempt ID of this work item - start_time (google.protobuf.timestamp_pb2.Timestamp): - Start time of this work item attempt. - end_time (google.protobuf.timestamp_pb2.Timestamp): - End time of this work item attempt. - If the work item is completed, this is the - actual end time of the work item. 
Otherwise, it - is the predicted end time. - state (google.cloud.dataflow_v1beta3.types.ExecutionState): - State of this work item. - progress (google.cloud.dataflow_v1beta3.types.ProgressTimeseries): - Progress of this work item. - metrics (MutableSequence[google.cloud.dataflow_v1beta3.types.MetricUpdate]): - Metrics for this work item. - """ - - task_id: str = proto.Field( - proto.STRING, - number=1, - ) - attempt_id: str = proto.Field( - proto.STRING, - number=2, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - state: 'ExecutionState' = proto.Field( - proto.ENUM, - number=5, - enum='ExecutionState', - ) - progress: 'ProgressTimeseries' = proto.Field( - proto.MESSAGE, - number=6, - message='ProgressTimeseries', - ) - metrics: MutableSequence['MetricUpdate'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='MetricUpdate', - ) - - -class WorkerDetails(proto.Message): - r"""Information about a worker - - Attributes: - worker_name (str): - Name of this worker - work_items (MutableSequence[google.cloud.dataflow_v1beta3.types.WorkItemDetails]): - Work items processed by this worker, sorted - by time. - """ - - worker_name: str = proto.Field( - proto.STRING, - number=1, - ) - work_items: MutableSequence['WorkItemDetails'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='WorkItemDetails', - ) - - -class StageExecutionDetails(proto.Message): - r"""Information about the workers and work items within a stage. - - Attributes: - workers (MutableSequence[google.cloud.dataflow_v1beta3.types.WorkerDetails]): - Workers that have done work on the stage. - next_page_token (str): - If present, this response does not contain all requested - tasks. To obtain the next page of results, repeat the - request with page_token set to this value. - """ - - @property - def raw_page(self): - return self - - workers: MutableSequence['WorkerDetails'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='WorkerDetails', - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py deleted file mode 100644 index 96010b1..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/snapshots.py +++ /dev/null @@ -1,253 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-from typing import MutableMapping, MutableSequence
-
-import proto  # type: ignore
-
-from google.protobuf import duration_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-
-
-__protobuf__ = proto.module(
    package='google.dataflow.v1beta3',
-    manifest={
-        'SnapshotState',
-        'PubsubSnapshotMetadata',
-        'Snapshot',
-        'GetSnapshotRequest',
-        'DeleteSnapshotRequest',
-        'DeleteSnapshotResponse',
-        'ListSnapshotsRequest',
-        'ListSnapshotsResponse',
-    },
-)
-
-
-class SnapshotState(proto.Enum):
-    r"""Snapshot state."""
-    UNKNOWN_SNAPSHOT_STATE = 0
-    PENDING = 1
-    RUNNING = 2
-    READY = 3
-    FAILED = 4
-    DELETED = 5
-
-
-class PubsubSnapshotMetadata(proto.Message):
-    r"""Represents a Pubsub snapshot.
-
-    Attributes:
-        topic_name (str):
-            The name of the Pubsub topic.
-        snapshot_name (str):
-            The name of the Pubsub snapshot.
-        expire_time (google.protobuf.timestamp_pb2.Timestamp):
-            The expire time of the Pubsub snapshot.
-    """
-
-    topic_name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    snapshot_name: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    expire_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=timestamp_pb2.Timestamp,
-    )
-
-
-class Snapshot(proto.Message):
-    r"""Represents a snapshot of a job.
-
-    Attributes:
-        id (str):
-            The unique ID of this snapshot.
-        project_id (str):
-            The project this snapshot belongs to.
-        source_job_id (str):
-            The job this snapshot was created from.
-        creation_time (google.protobuf.timestamp_pb2.Timestamp):
-            The time this snapshot was created.
-        ttl (google.protobuf.duration_pb2.Duration):
-            The time after which this snapshot will be
-            automatically deleted.
-        state (google.cloud.dataflow_v1beta3.types.SnapshotState):
-            State of the snapshot.
-        pubsub_metadata (MutableSequence[google.cloud.dataflow_v1beta3.types.PubsubSnapshotMetadata]):
-            Pub/Sub snapshot metadata.
-        description (str):
-            User-specified description of the snapshot.
-            May be empty.
-        disk_size_bytes (int):
-            The disk byte size of the snapshot. Only
-            available for snapshots in READY state.
-        region (str):
-            Cloud region where this snapshot lives,
-            e.g., "us-central1".
-    """
-
-    id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    project_id: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    source_job_id: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    creation_time: timestamp_pb2.Timestamp = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message=timestamp_pb2.Timestamp,
-    )
-    ttl: duration_pb2.Duration = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        message=duration_pb2.Duration,
-    )
-    state: 'SnapshotState' = proto.Field(
-        proto.ENUM,
-        number=6,
-        enum='SnapshotState',
-    )
-    pubsub_metadata: MutableSequence['PubsubSnapshotMetadata'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=7,
-        message='PubsubSnapshotMetadata',
-    )
-    description: str = proto.Field(
-        proto.STRING,
-        number=8,
-    )
-    disk_size_bytes: int = proto.Field(
-        proto.INT64,
-        number=9,
-    )
-    region: str = proto.Field(
-        proto.STRING,
-        number=10,
-    )
-
-
-class GetSnapshotRequest(proto.Message):
-    r"""Request to get information about a snapshot.
-
-    Attributes:
-        project_id (str):
-            The ID of the Cloud Platform project that the
-            snapshot belongs to.
-        snapshot_id (str):
-            The ID of the snapshot.
-        location (str):
-            The location that contains this snapshot.
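-
-    Example (editor's illustrative sketch; all identifiers are
-    placeholders)::
-
-        # Build the request message; it is passed to the
-        # snapshots service's get_snapshot method.
-        request = GetSnapshotRequest(
-            project_id="my-project",
-            snapshot_id="my-snapshot-id",
-            location="us-central1",
-        )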
- """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - snapshot_id: str = proto.Field( - proto.STRING, - number=2, - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteSnapshotRequest(proto.Message): - r"""Request to delete a snapshot. - - Attributes: - project_id (str): - The ID of the Cloud Platform project that the - snapshot belongs to. - snapshot_id (str): - The ID of the snapshot. - location (str): - The location that contains this snapshot. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - snapshot_id: str = proto.Field( - proto.STRING, - number=2, - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - - -class DeleteSnapshotResponse(proto.Message): - r"""Response from deleting a snapshot. - """ - - -class ListSnapshotsRequest(proto.Message): - r"""Request to list snapshots. - - Attributes: - project_id (str): - The project ID to list snapshots for. - job_id (str): - If specified, list snapshots created from - this job. - location (str): - The location to list snapshots in. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_id: str = proto.Field( - proto.STRING, - number=3, - ) - location: str = proto.Field( - proto.STRING, - number=2, - ) - - -class ListSnapshotsResponse(proto.Message): - r"""List of snapshots. - - Attributes: - snapshots (MutableSequence[google.cloud.dataflow_v1beta3.types.Snapshot]): - Returned snapshots. - """ - - snapshots: MutableSequence['Snapshot'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Snapshot', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py deleted file mode 100644 index 4656222..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/streaming.py +++ /dev/null @@ -1,501 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.dataflow.v1beta3', - manifest={ - 'TopologyConfig', - 'PubsubLocation', - 'StreamingStageLocation', - 'StreamingSideInputLocation', - 'CustomSourceLocation', - 'StreamLocation', - 'StateFamilyConfig', - 'ComputationTopology', - 'KeyRangeLocation', - 'MountedDataDisk', - 'DataDiskAssignment', - 'KeyRangeDataDiskAssignment', - 'StreamingComputationRanges', - 'StreamingApplianceSnapshotConfig', - }, -) - - -class TopologyConfig(proto.Message): - r"""Global topology of the streaming Dataflow job, including all - computations and their sharded locations. - - Attributes: - computations (MutableSequence[google.cloud.dataflow_v1beta3.types.ComputationTopology]): - The computations associated with a streaming - Dataflow job. 
-        data_disk_assignments (MutableSequence[google.cloud.dataflow_v1beta3.types.DataDiskAssignment]):
-            The disks assigned to a streaming Dataflow
-            job.
-        user_stage_to_computation_name_map (MutableMapping[str, str]):
-            Maps user stage names to stable computation
-            names.
-        forwarding_key_bits (int):
-            The size (in bits) of keys that will be
-            assigned to source messages.
-        persistent_state_version (int):
-            Version number for persistent state.
-    """
-
-    computations: MutableSequence['ComputationTopology'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message='ComputationTopology',
-    )
-    data_disk_assignments: MutableSequence['DataDiskAssignment'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=2,
-        message='DataDiskAssignment',
-    )
-    user_stage_to_computation_name_map: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=3,
-    )
-    forwarding_key_bits: int = proto.Field(
-        proto.INT32,
-        number=4,
-    )
-    persistent_state_version: int = proto.Field(
-        proto.INT32,
-        number=5,
-    )
-
-
-class PubsubLocation(proto.Message):
-    r"""Identifies a pubsub location to use for transferring data
-    into or out of a streaming Dataflow job.
-
-    Attributes:
-        topic (str):
-            A pubsub topic, in the form of
-            "pubsub.googleapis.com/topics/<project-id>/<topic-name>".
-        subscription (str):
-            A pubsub subscription, in the form of
-            "pubsub.googleapis.com/subscriptions/<project-id>/<subscription-name>".
-        timestamp_label (str):
-            If set, contains a pubsub label from which to
-            extract record timestamps. If left empty, record
-            timestamps will be generated upon arrival.
-        id_label (str):
-            If set, contains a pubsub label from which to
-            extract record ids. If left empty, record
-            deduplication will be strictly best effort.
-        drop_late_data (bool):
-            Indicates whether the pipeline allows
-            late-arriving data.
-        tracking_subscription (str):
-            If set, specifies the pubsub subscription
-            that will be used for tracking custom time
-            timestamps for watermark estimation.
-        with_attributes (bool):
-            If true, then the client has requested to get
-            pubsub attributes.
-    """
-
-    topic: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    subscription: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    timestamp_label: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    id_label: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    drop_late_data: bool = proto.Field(
-        proto.BOOL,
-        number=5,
-    )
-    tracking_subscription: str = proto.Field(
-        proto.STRING,
-        number=6,
-    )
-    with_attributes: bool = proto.Field(
-        proto.BOOL,
-        number=7,
-    )
-
-
-class StreamingStageLocation(proto.Message):
-    r"""Identifies the location of a streaming computation stage, for
-    stage-to-stage communication.
-
-    Attributes:
-        stream_id (str):
-            Identifies the particular stream within the
-            streaming Dataflow job.
-    """
-
-    stream_id: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class StreamingSideInputLocation(proto.Message):
-    r"""Identifies the location of a streaming side input.
-
-    Attributes:
-        tag (str):
-            Identifies the particular side input within
-            the streaming Dataflow job.
-        state_family (str):
-            Identifies the state family where this side
-            input is stored.
-    """
-
-    tag: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    state_family: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-
-
-class CustomSourceLocation(proto.Message):
-    r"""Identifies the location of a custom source.
-
-    Attributes:
-        stateful (bool):
-            Whether this source is stateful.
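-
-    Example (editor's illustrative sketch; shows the message set as
-    the ``custom_source_location`` member of the ``StreamLocation``
-    oneof defined below)::
-
-        # Setting one oneof member clears the others.
-        stream = StreamLocation(
-            custom_source_location=CustomSourceLocation(stateful=True),
-        )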
- """ - - stateful: bool = proto.Field( - proto.BOOL, - number=1, - ) - - -class StreamLocation(proto.Message): - r"""Describes a stream of data, either as input to be processed - or as output of a streaming Dataflow job. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - streaming_stage_location (google.cloud.dataflow_v1beta3.types.StreamingStageLocation): - The stream is part of another computation - within the current streaming Dataflow job. - - This field is a member of `oneof`_ ``location``. - pubsub_location (google.cloud.dataflow_v1beta3.types.PubsubLocation): - The stream is a pubsub stream. - - This field is a member of `oneof`_ ``location``. - side_input_location (google.cloud.dataflow_v1beta3.types.StreamingSideInputLocation): - The stream is a streaming side input. - - This field is a member of `oneof`_ ``location``. - custom_source_location (google.cloud.dataflow_v1beta3.types.CustomSourceLocation): - The stream is a custom source. - - This field is a member of `oneof`_ ``location``. - """ - - streaming_stage_location: 'StreamingStageLocation' = proto.Field( - proto.MESSAGE, - number=1, - oneof='location', - message='StreamingStageLocation', - ) - pubsub_location: 'PubsubLocation' = proto.Field( - proto.MESSAGE, - number=2, - oneof='location', - message='PubsubLocation', - ) - side_input_location: 'StreamingSideInputLocation' = proto.Field( - proto.MESSAGE, - number=3, - oneof='location', - message='StreamingSideInputLocation', - ) - custom_source_location: 'CustomSourceLocation' = proto.Field( - proto.MESSAGE, - number=4, - oneof='location', - message='CustomSourceLocation', - ) - - -class StateFamilyConfig(proto.Message): - r"""State family configuration. - - Attributes: - state_family (str): - The state family value. - is_read (bool): - If true, this family corresponds to a read - operation. - """ - - state_family: str = proto.Field( - proto.STRING, - number=1, - ) - is_read: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ComputationTopology(proto.Message): - r"""All configuration data for a particular Computation. - - Attributes: - system_stage_name (str): - The system stage name. - computation_id (str): - The ID of the computation. - key_ranges (MutableSequence[google.cloud.dataflow_v1beta3.types.KeyRangeLocation]): - The key ranges processed by the computation. - inputs (MutableSequence[google.cloud.dataflow_v1beta3.types.StreamLocation]): - The inputs to the computation. - outputs (MutableSequence[google.cloud.dataflow_v1beta3.types.StreamLocation]): - The outputs from the computation. - state_families (MutableSequence[google.cloud.dataflow_v1beta3.types.StateFamilyConfig]): - The state family values. 
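-
-    Example (editor's illustrative sketch; names and IDs are
-    placeholders, not values the service emits)::
-
-        # Describe one computation with a single read state family.
-        topology = ComputationTopology(
-            system_stage_name="F15",
-            computation_id="computation-1",
-            state_families=[
-                StateFamilyConfig(state_family="s1", is_read=True),
-            ],
-        )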
-    """
-
-    system_stage_name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    computation_id: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    key_ranges: MutableSequence['KeyRangeLocation'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=2,
-        message='KeyRangeLocation',
-    )
-    inputs: MutableSequence['StreamLocation'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=3,
-        message='StreamLocation',
-    )
-    outputs: MutableSequence['StreamLocation'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=4,
-        message='StreamLocation',
-    )
-    state_families: MutableSequence['StateFamilyConfig'] = proto.RepeatedField(
-        proto.MESSAGE,
-        number=7,
-        message='StateFamilyConfig',
-    )
-
-
-class KeyRangeLocation(proto.Message):
-    r"""Location information for a specific key-range of a sharded
-    computation. Currently we only support UTF-8 character splits to
-    simplify encoding into JSON.
-
-    Attributes:
-        start (str):
-            The start (inclusive) of the key range.
-        end (str):
-            The end (exclusive) of the key range.
-        delivery_endpoint (str):
-            The physical location of this range
-            assignment to be used for streaming computation
-            cross-worker message delivery.
-        data_disk (str):
-            The name of the data disk where data for this
-            range is stored. This name is local to the
-            Google Cloud Platform project and uniquely
-            identifies the disk within that project, for
-            example
-            "myproject-1014-104817-4c2-harness-0-disk-1".
-        deprecated_persistent_directory (str):
-            DEPRECATED. The location of the persistent
-            state for this range, as a persistent directory
-            in the worker local filesystem.
-    """
-
-    start: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    end: str = proto.Field(
-        proto.STRING,
-        number=2,
-    )
-    delivery_endpoint: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    data_disk: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    deprecated_persistent_directory: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-
-
-class MountedDataDisk(proto.Message):
-    r"""Describes mounted data disk.
-
-    Attributes:
-        data_disk (str):
-            The name of the data disk.
-            This name is local to the Google Cloud Platform
-            project and uniquely identifies the disk within
-            that project, for example
-            "myproject-1014-104817-4c2-harness-0-disk-1".
-    """
-
-    data_disk: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class DataDiskAssignment(proto.Message):
-    r"""Data disk assignment for a given VM instance.
-
-    Attributes:
-        vm_instance (str):
-            VM instance name the data disks mounted to,
-            for example
-            "myproject-1014-104817-4c2-harness-0".
-        data_disks (MutableSequence[str]):
-            Mounted data disks. The order is important: a
-            data disk's 0-based index in this list defines
-            which persistent directory the disk is mounted
-            to, for example the list of {
-            "myproject-1014-104817-4c2-harness-0-disk-0" },
-            { "myproject-1014-104817-4c2-harness-0-disk-1"
-            }.
-    """
-
-    vm_instance: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    data_disks: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=2,
-    )
-
-
-class KeyRangeDataDiskAssignment(proto.Message):
-    r"""Data disk assignment information for a specific key-range of
-    a sharded computation.
-    Currently we only support UTF-8 character splits to simplify
-    encoding into JSON.
-
-    Attributes:
-        start (str):
-            The start (inclusive) of the key range.
-        end (str):
-            The end (exclusive) of the key range.
-        data_disk (str):
-            The name of the data disk where data for this
-            range is stored.
This name is local to the - Google Cloud Platform project and uniquely - identifies the disk within that project, for - example - "myproject-1014-104817-4c2-harness-0-disk-1". - """ - - start: str = proto.Field( - proto.STRING, - number=1, - ) - end: str = proto.Field( - proto.STRING, - number=2, - ) - data_disk: str = proto.Field( - proto.STRING, - number=3, - ) - - -class StreamingComputationRanges(proto.Message): - r"""Describes full or partial data disk assignment information of - the computation ranges. - - Attributes: - computation_id (str): - The ID of the computation. - range_assignments (MutableSequence[google.cloud.dataflow_v1beta3.types.KeyRangeDataDiskAssignment]): - Data disk assignments for ranges from this - computation. - """ - - computation_id: str = proto.Field( - proto.STRING, - number=1, - ) - range_assignments: MutableSequence['KeyRangeDataDiskAssignment'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='KeyRangeDataDiskAssignment', - ) - - -class StreamingApplianceSnapshotConfig(proto.Message): - r"""Streaming appliance snapshot configuration. - - Attributes: - snapshot_id (str): - If set, indicates the snapshot id for the - snapshot being performed. - import_state_endpoint (str): - Indicates which endpoint is used to import - appliance state. - """ - - snapshot_id: str = proto.Field( - proto.STRING, - number=1, - ) - import_state_endpoint: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py b/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py deleted file mode 100644 index 0ae7c25..0000000 --- a/owl-bot-staging/v1beta3/google/cloud/dataflow_v1beta3/types/templates.py +++ /dev/null @@ -1,1063 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.dataflow_v1beta3.types import environment as gd_environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.dataflow.v1beta3', - manifest={ - 'ParameterType', - 'LaunchFlexTemplateResponse', - 'ContainerSpec', - 'LaunchFlexTemplateParameter', - 'FlexTemplateRuntimeEnvironment', - 'LaunchFlexTemplateRequest', - 'RuntimeEnvironment', - 'ParameterMetadata', - 'TemplateMetadata', - 'SDKInfo', - 'RuntimeMetadata', - 'CreateJobFromTemplateRequest', - 'GetTemplateRequest', - 'GetTemplateResponse', - 'LaunchTemplateParameters', - 'LaunchTemplateRequest', - 'LaunchTemplateResponse', - 'InvalidTemplateParameters', - 'DynamicTemplateLaunchParams', - }, -) - - -class ParameterType(proto.Enum): - r"""ParameterType specifies what kind of input we need for this - parameter. 
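-
-    Example (editor's illustrative sketch; pairs the enum with the
-    ``ParameterMetadata`` message defined later in this module, and
-    all field values are placeholders)::
-
-        # Declare a template parameter that expects a readable
-        # Cloud Storage file, so UIs can show a file picker.
-        metadata = ParameterMetadata(
-            name="inputFile",
-            label="Input file",
-            help_text="Cloud Storage path to read from.",
-            param_type=ParameterType.GCS_READ_FILE,
-        )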
-    """
-    DEFAULT = 0
-    TEXT = 1
-    GCS_READ_BUCKET = 2
-    GCS_WRITE_BUCKET = 3
-    GCS_READ_FILE = 4
-    GCS_WRITE_FILE = 5
-    GCS_READ_FOLDER = 6
-    GCS_WRITE_FOLDER = 7
-    PUBSUB_TOPIC = 8
-    PUBSUB_SUBSCRIPTION = 9
-
-
-class LaunchFlexTemplateResponse(proto.Message):
-    r"""Response to the request to launch a job from Flex Template.
-
-    Attributes:
-        job (google.cloud.dataflow_v1beta3.types.Job):
-            The job that was launched, if the request was
-            not a dry run and the job was successfully
-            launched.
-    """
-
-    job: jobs.Job = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message=jobs.Job,
-    )
-
-
-class ContainerSpec(proto.Message):
-    r"""Container Spec.
-
-    Attributes:
-        image (str):
-            Name of the Docker container image. E.g.,
-            gcr.io/project/some-image
-        metadata (google.cloud.dataflow_v1beta3.types.TemplateMetadata):
-            Metadata describing a template including
-            description and validation rules.
-        sdk_info (google.cloud.dataflow_v1beta3.types.SDKInfo):
-            Required. SDK info of the Flex Template.
-        default_environment (google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment):
-            Default runtime environment for the job.
-    """
-
-    image: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    metadata: 'TemplateMetadata' = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='TemplateMetadata',
-    )
-    sdk_info: 'SDKInfo' = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='SDKInfo',
-    )
-    default_environment: 'FlexTemplateRuntimeEnvironment' = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message='FlexTemplateRuntimeEnvironment',
-    )
-
-
-class LaunchFlexTemplateParameter(proto.Message):
-    r"""Launch FlexTemplate Parameter.
-
-    This message has `oneof`_ fields (mutually exclusive fields).
-    For each oneof, at most one member field can be set at the same time.
-    Setting any member of the oneof automatically clears all other
-    members.
-
-    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
-
-    Attributes:
-        job_name (str):
-            Required. The job name to use for the created
-            job. For an update job request, the job name
-            should be the same as the existing running job.
-        container_spec (google.cloud.dataflow_v1beta3.types.ContainerSpec):
-            Spec about the container image to launch.
-
-            This field is a member of `oneof`_ ``template``.
-        container_spec_gcs_path (str):
-            Cloud Storage path to a file with json
-            serialized ContainerSpec as content.
-
-            This field is a member of `oneof`_ ``template``.
-        parameters (MutableMapping[str, str]):
-            The parameters for FlexTemplate. Ex. {"num_workers":"5"}
-        launch_options (MutableMapping[str, str]):
-            Launch options for this flex template job.
-            This is a common set of options across languages
-            and templates. This should not be used to pass
-            job parameters.
-        environment (google.cloud.dataflow_v1beta3.types.FlexTemplateRuntimeEnvironment):
-            The runtime environment for the FlexTemplate
-            job.
-        update (bool):
-            Set this to true if you are sending a request
-            to update a running streaming job. When set, the
-            job name should be the same as the running job.
-        transform_name_mappings (MutableMapping[str, str]):
-            Use this to pass transform_name_mappings for streaming
-            update jobs.
Ex: {"oldTransformName":"newTransformName",...}
-    """
-
-    job_name: str = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    container_spec: 'ContainerSpec' = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        oneof='template',
-        message='ContainerSpec',
-    )
-    container_spec_gcs_path: str = proto.Field(
-        proto.STRING,
-        number=5,
-        oneof='template',
-    )
-    parameters: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=2,
-    )
-    launch_options: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=6,
-    )
-    environment: 'FlexTemplateRuntimeEnvironment' = proto.Field(
-        proto.MESSAGE,
-        number=7,
-        message='FlexTemplateRuntimeEnvironment',
-    )
-    update: bool = proto.Field(
-        proto.BOOL,
-        number=8,
-    )
-    transform_name_mappings: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=9,
-    )
-
-
-class FlexTemplateRuntimeEnvironment(proto.Message):
-    r"""The environment values to be set at runtime for flex
-    template.
-
-    Attributes:
-        num_workers (int):
-            The initial number of Google Compute Engine
-            instances for the job.
-        max_workers (int):
-            The maximum number of Google Compute Engine
-            instances to be made available to your pipeline
-            during execution, from 1 to 1000.
-        zone (str):
-            The Compute Engine `availability
-            zone <https://cloud.google.com/compute/docs/regions-zones/regions-zones>`__
-            for launching worker instances to run your pipeline. In the
-            future, worker_zone will take precedence.
-        service_account_email (str):
-            The email address of the service account to
-            run the job as.
-        temp_location (str):
-            The Cloud Storage path to use for temporary files. Must be a
-            valid Cloud Storage URL, beginning with ``gs://``.
-        machine_type (str):
-            The machine type to use for the job. Defaults
-            to the value from the template if not specified.
-        additional_experiments (MutableSequence[str]):
-            Additional experiment flags for the job.
-        network (str):
-            Network to which VMs will be assigned. If
-            empty or unspecified, the service will use the
-            network "default".
-        subnetwork (str):
-            Subnetwork to which VMs will be assigned, if desired. You
-            can specify a subnetwork using either a complete URL or an
-            abbreviated path. Expected to be of the form
-            "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK"
-            or "regions/REGION/subnetworks/SUBNETWORK". If the
-            subnetwork is located in a Shared VPC network, you must use
-            the complete URL.
-        additional_user_labels (MutableMapping[str, str]):
-            Additional user labels to be specified for the job. Keys and
-            values must follow the restrictions specified in the
-            `labeling
-            restrictions <https://cloud.google.com/compute/docs/labeling-resources>`__
-            page. An object containing a list of "key": value pairs.
-            Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
-        kms_key_name (str):
-            Name for the Cloud KMS key for the job.
-            Key format is:
-            projects/<project>/locations/<location>/keyRings/<keyring>/cryptoKeys/<key>
-        ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration):
-            Configuration for VM IPs.
-        worker_region (str):
-            The Compute Engine region
-            (https://cloud.google.com/compute/docs/regions-zones/regions-zones)
-            in which worker processing should occur, e.g. "us-west1".
-            Mutually exclusive with worker_zone. If neither
-            worker_region nor worker_zone is specified, default to the
-            control plane's region.
-        worker_zone (str):
-            The Compute Engine zone
-            (https://cloud.google.com/compute/docs/regions-zones/regions-zones)
-            in which worker processing should occur, e.g. "us-west1-a".
-            Mutually exclusive with worker_region.
If neither
-            worker_region nor worker_zone is specified, a zone in the
-            control plane's region is chosen based on available
-            capacity. If both ``worker_zone`` and ``zone`` are set,
-            ``worker_zone`` takes precedence.
-        enable_streaming_engine (bool):
-            Whether to enable Streaming Engine for the
-            job.
-        flexrs_goal (google.cloud.dataflow_v1beta3.types.FlexResourceSchedulingGoal):
-            Set FlexRS goal for the job.
-            https://cloud.google.com/dataflow/docs/guides/flexrs
-        staging_location (str):
-            The Cloud Storage path for staging local files. Must be a
-            valid Cloud Storage URL, beginning with ``gs://``.
-        sdk_container_image (str):
-            Docker registry location of the container
-            image to use for the worker harness. Default is
-            the container for the version of the SDK. Note
-            this field is only valid for portable pipelines.
-        disk_size_gb (int):
-            Worker disk size, in gigabytes.
-        autoscaling_algorithm (google.cloud.dataflow_v1beta3.types.AutoscalingAlgorithm):
-            The algorithm to use for autoscaling.
-        dump_heap_on_oom (bool):
-            If true, save a heap dump before killing a
-            thread or process which is GC thrashing or out
-            of memory. The location of the heap file will
-            either be echoed back to the user, or the user
-            will be given the opportunity to download the
-            heap file.
-        save_heap_dumps_to_gcs_path (str):
-            Cloud Storage bucket (directory) to which heap dumps are
-            uploaded. Enabling this implies that heap dumps should be
-            generated on OOM (dump_heap_on_oom is set to true).
-        launcher_machine_type (str):
-            The machine type to use for launching the
-            job. The default is n1-standard-1.
-    """
-
-    num_workers: int = proto.Field(
-        proto.INT32,
-        number=1,
-    )
-    max_workers: int = proto.Field(
-        proto.INT32,
-        number=2,
-    )
-    zone: str = proto.Field(
-        proto.STRING,
-        number=3,
-    )
-    service_account_email: str = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    temp_location: str = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    machine_type: str = proto.Field(
-        proto.STRING,
-        number=6,
-    )
-    additional_experiments: MutableSequence[str] = proto.RepeatedField(
-        proto.STRING,
-        number=7,
-    )
-    network: str = proto.Field(
-        proto.STRING,
-        number=8,
-    )
-    subnetwork: str = proto.Field(
-        proto.STRING,
-        number=9,
-    )
-    additional_user_labels: MutableMapping[str, str] = proto.MapField(
-        proto.STRING,
-        proto.STRING,
-        number=10,
-    )
-    kms_key_name: str = proto.Field(
-        proto.STRING,
-        number=11,
-    )
-    ip_configuration: gd_environment.WorkerIPAddressConfiguration = proto.Field(
-        proto.ENUM,
-        number=12,
-        enum=gd_environment.WorkerIPAddressConfiguration,
-    )
-    worker_region: str = proto.Field(
-        proto.STRING,
-        number=13,
-    )
-    worker_zone: str = proto.Field(
-        proto.STRING,
-        number=14,
-    )
-    enable_streaming_engine: bool = proto.Field(
-        proto.BOOL,
-        number=15,
-    )
-    flexrs_goal: gd_environment.FlexResourceSchedulingGoal = proto.Field(
-        proto.ENUM,
-        number=16,
-        enum=gd_environment.FlexResourceSchedulingGoal,
-    )
-    staging_location: str = proto.Field(
-        proto.STRING,
-        number=17,
-    )
-    sdk_container_image: str = proto.Field(
-        proto.STRING,
-        number=18,
-    )
-    disk_size_gb: int = proto.Field(
-        proto.INT32,
-        number=20,
-    )
-    autoscaling_algorithm: gd_environment.AutoscalingAlgorithm = proto.Field(
-        proto.ENUM,
-        number=21,
-        enum=gd_environment.AutoscalingAlgorithm,
-    )
-    dump_heap_on_oom: bool = proto.Field(
-        proto.BOOL,
-        number=22,
-    )
-    save_heap_dumps_to_gcs_path: str = proto.Field(
-        proto.STRING,
-        number=23,
-    )
-    launcher_machine_type: str = proto.Field(
-        proto.STRING,
-
number=24, - ) - - -class LaunchFlexTemplateRequest(proto.Message): - r"""A request to launch a Cloud Dataflow job from a FlexTemplate. - - Attributes: - project_id (str): - Required. The ID of the Cloud Platform - project that the job belongs to. - launch_parameter (google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateParameter): - Required. Parameter to launch a job from a Flex - Template. - location (str): - Required. The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - to which to direct the request. E.g., us-central1, us-west1. - validate_only (bool): - If true, the request is validated but not - actually executed. Defaults to false. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - launch_parameter: 'LaunchFlexTemplateParameter' = proto.Field( - proto.MESSAGE, - number=2, - message='LaunchFlexTemplateParameter', - ) - location: str = proto.Field( - proto.STRING, - number=3, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=4, - ) - - -class RuntimeEnvironment(proto.Message): - r"""The environment values to set at runtime. - - Attributes: - num_workers (int): - The initial number of Google Compute Engine - instances for the job. - max_workers (int): - The maximum number of Google Compute Engine - instances to be made available to your pipeline - during execution, from 1 to 1000. - zone (str): - The Compute Engine `availability - zone <https://cloud.google.com/compute/docs/regions-zones/regions-zones>`__ - for launching worker instances to run your pipeline. In the - future, worker_zone will take precedence. - service_account_email (str): - The email address of the service account to - run the job as. - temp_location (str): - The Cloud Storage path to use for temporary files. Must be a - valid Cloud Storage URL, beginning with ``gs://``. - bypass_temp_dir_validation (bool): - Whether to bypass the safety checks for the - job's temporary directory. Use with caution. - machine_type (str): - The machine type to use for the job. Defaults - to the value from the template if not specified. - additional_experiments (MutableSequence[str]): - Additional experiment flags for the job, specified with the - ``--experiments`` option. - network (str): - Network to which VMs will be assigned. If - empty or unspecified, the service will use the - network "default". - subnetwork (str): - Subnetwork to which VMs will be assigned, if desired. You - can specify a subnetwork using either a complete URL or an - abbreviated path. Expected to be of the form - "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK" - or "regions/REGION/subnetworks/SUBNETWORK". If the - subnetwork is located in a Shared VPC network, you must use - the complete URL. - additional_user_labels (MutableMapping[str, str]): - Additional user labels to be specified for the job. Keys and - values should follow the restrictions specified in the - `labeling - restrictions <https://cloud.google.com/compute/docs/labeling-resources>`__ - page. An object containing a list of "key": value pairs. - Example: { "name": "wrench", "mass": "1kg", "count": "3" }. - kms_key_name (str): - Name for the Cloud KMS key for the job. - Key format is: - projects//locations//keyRings//cryptoKeys/ - ip_configuration (google.cloud.dataflow_v1beta3.types.WorkerIPAddressConfiguration): - Configuration for VM IPs. - worker_region (str): - The Compute Engine region - (https://cloud.google.com/compute/docs/regions-zones/regions-zones) - in which worker processing should occur, e.g. "us-west1". - Mutually exclusive with worker_zone.
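For orientation, a minimal sketch of how the messages above compose when launching a Flex Template (not generated code: the project ID, region, and gs:// paths are placeholders, and validate_only makes the call a dry run):

    from google.cloud import dataflow_v1beta3

    def launch_flex_template_dry_run():
        client = dataflow_v1beta3.FlexTemplatesServiceClient()
        parameter = dataflow_v1beta3.LaunchFlexTemplateParameter(
            job_name="example-flex-job",
            # container_spec_gcs_path and container_spec share the 'template'
            # oneof; set exactly one of them.
            container_spec_gcs_path="gs://example-bucket/templates/spec.json",
            parameters={"inputFile": "gs://example-bucket/input.txt"},
            environment=dataflow_v1beta3.FlexTemplateRuntimeEnvironment(
                max_workers=5,
                temp_location="gs://example-bucket/temp",
            ),
        )
        request = dataflow_v1beta3.LaunchFlexTemplateRequest(
            project_id="example-project",
            location="us-central1",
            launch_parameter=parameter,
            validate_only=True,  # validate the request without launching a job
        )
        response = client.launch_flex_template(request=request)
        print(response)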
If neither - worker_region nor worker_zone is specified, default to the - control plane's region. - worker_zone (str): - The Compute Engine zone - (https://cloud.google.com/compute/docs/regions-zones/regions-zones) - in which worker processing should occur, e.g. "us-west1-a". - Mutually exclusive with worker_region. If neither - worker_region nor worker_zone is specified, a zone in the - control plane's region is chosen based on available - capacity. If both ``worker_zone`` and ``zone`` are set, - ``worker_zone`` takes precedence. - enable_streaming_engine (bool): - Whether to enable Streaming Engine for the - job. - """ - - num_workers: int = proto.Field( - proto.INT32, - number=11, - ) - max_workers: int = proto.Field( - proto.INT32, - number=1, - ) - zone: str = proto.Field( - proto.STRING, - number=2, - ) - service_account_email: str = proto.Field( - proto.STRING, - number=3, - ) - temp_location: str = proto.Field( - proto.STRING, - number=4, - ) - bypass_temp_dir_validation: bool = proto.Field( - proto.BOOL, - number=5, - ) - machine_type: str = proto.Field( - proto.STRING, - number=6, - ) - additional_experiments: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=7, - ) - network: str = proto.Field( - proto.STRING, - number=8, - ) - subnetwork: str = proto.Field( - proto.STRING, - number=9, - ) - additional_user_labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - kms_key_name: str = proto.Field( - proto.STRING, - number=12, - ) - ip_configuration: gd_environment.WorkerIPAddressConfiguration = proto.Field( - proto.ENUM, - number=14, - enum=gd_environment.WorkerIPAddressConfiguration, - ) - worker_region: str = proto.Field( - proto.STRING, - number=15, - ) - worker_zone: str = proto.Field( - proto.STRING, - number=16, - ) - enable_streaming_engine: bool = proto.Field( - proto.BOOL, - number=17, - ) - - -class ParameterMetadata(proto.Message): - r"""Metadata for a specific parameter. - - Attributes: - name (str): - Required. The name of the parameter. - label (str): - Required. The label to display for the - parameter. - help_text (str): - Required. The help text to display for the - parameter. - is_optional (bool): - Optional. Whether the parameter is optional. - Defaults to false. - regexes (MutableSequence[str]): - Optional. Regexes that the parameter must - match. - param_type (google.cloud.dataflow_v1beta3.types.ParameterType): - Optional. The type of the parameter. - Used for selecting input picker. - custom_metadata (MutableMapping[str, str]): - Optional. Additional metadata for describing - this parameter. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - label: str = proto.Field( - proto.STRING, - number=2, - ) - help_text: str = proto.Field( - proto.STRING, - number=3, - ) - is_optional: bool = proto.Field( - proto.BOOL, - number=4, - ) - regexes: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=5, - ) - param_type: 'ParameterType' = proto.Field( - proto.ENUM, - number=6, - enum='ParameterType', - ) - custom_metadata: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=7, - ) - - -class TemplateMetadata(proto.Message): - r"""Metadata describing a template. - - Attributes: - name (str): - Required. The name of the template. - description (str): - Optional. A description of the template. - parameters (MutableSequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata]): - The parameters for the template. 
- """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - parameters: MutableSequence['ParameterMetadata'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ParameterMetadata', - ) - - -class SDKInfo(proto.Message): - r"""SDK Information. - - Attributes: - language (google.cloud.dataflow_v1beta3.types.SDKInfo.Language): - Required. The SDK Language. - version (str): - Optional. The SDK version. - """ - class Language(proto.Enum): - r"""SDK Language.""" - UNKNOWN = 0 - JAVA = 1 - PYTHON = 2 - - language: Language = proto.Field( - proto.ENUM, - number=1, - enum=Language, - ) - version: str = proto.Field( - proto.STRING, - number=2, - ) - - -class RuntimeMetadata(proto.Message): - r"""RuntimeMetadata describing a runtime environment. - - Attributes: - sdk_info (google.cloud.dataflow_v1beta3.types.SDKInfo): - SDK Info for the template. - parameters (MutableSequence[google.cloud.dataflow_v1beta3.types.ParameterMetadata]): - The parameters for the template. - """ - - sdk_info: 'SDKInfo' = proto.Field( - proto.MESSAGE, - number=1, - message='SDKInfo', - ) - parameters: MutableSequence['ParameterMetadata'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='ParameterMetadata', - ) - - -class CreateJobFromTemplateRequest(proto.Message): - r"""A request to create a Cloud Dataflow job from a template. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - Required. The ID of the Cloud Platform - project that the job belongs to. - job_name (str): - Required. The job name to use for the created - job. - gcs_path (str): - Required. A Cloud Storage path to the template from which to - create the job. Must be a valid Cloud Storage URL, beginning - with ``gs://``. - - This field is a member of `oneof`_ ``template``. - parameters (MutableMapping[str, str]): - The runtime parameters to pass to the job. - environment (google.cloud.dataflow_v1beta3.types.RuntimeEnvironment): - The runtime environment for the job. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - to which to direct the request. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - job_name: str = proto.Field( - proto.STRING, - number=4, - ) - gcs_path: str = proto.Field( - proto.STRING, - number=2, - oneof='template', - ) - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - environment: 'RuntimeEnvironment' = proto.Field( - proto.MESSAGE, - number=5, - message='RuntimeEnvironment', - ) - location: str = proto.Field( - proto.STRING, - number=6, - ) - - -class GetTemplateRequest(proto.Message): - r"""A request to retrieve a Cloud Dataflow job template. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - Required. The ID of the Cloud Platform - project that the job belongs to. - gcs_path (str): - Required. A Cloud Storage path to the - template from which to create the job. - Must be valid Cloud Storage URL, beginning with - 'gs://'. - - This field is a member of `oneof`_ ``template``. - view (google.cloud.dataflow_v1beta3.types.GetTemplateRequest.TemplateView): - The view to retrieve. Defaults to METADATA_ONLY. 
- location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - to which to direct the request. - """ - class TemplateView(proto.Enum): - r"""The various views of a template that may be retrieved.""" - METADATA_ONLY = 0 - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - gcs_path: str = proto.Field( - proto.STRING, - number=2, - oneof='template', - ) - view: TemplateView = proto.Field( - proto.ENUM, - number=3, - enum=TemplateView, - ) - location: str = proto.Field( - proto.STRING, - number=4, - ) - - -class GetTemplateResponse(proto.Message): - r"""The response to a GetTemplate request. - - Attributes: - status (google.rpc.status_pb2.Status): - The status of the get template request. Any problems with - the request will be indicated in the error_details. - metadata (google.cloud.dataflow_v1beta3.types.TemplateMetadata): - The template metadata describing the template - name, available parameters, etc. - template_type (google.cloud.dataflow_v1beta3.types.GetTemplateResponse.TemplateType): - Template Type. - runtime_metadata (google.cloud.dataflow_v1beta3.types.RuntimeMetadata): - Describes the runtime metadata with SDKInfo - and available parameters. - """ - class TemplateType(proto.Enum): - r"""Template Type.""" - UNKNOWN = 0 - LEGACY = 1 - FLEX = 2 - - status: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - metadata: 'TemplateMetadata' = proto.Field( - proto.MESSAGE, - number=2, - message='TemplateMetadata', - ) - template_type: TemplateType = proto.Field( - proto.ENUM, - number=3, - enum=TemplateType, - ) - runtime_metadata: 'RuntimeMetadata' = proto.Field( - proto.MESSAGE, - number=4, - message='RuntimeMetadata', - ) - - -class LaunchTemplateParameters(proto.Message): - r"""Parameters to provide to the template being launched. - - Attributes: - job_name (str): - Required. The job name to use for the created - job. - parameters (MutableMapping[str, str]): - The runtime parameters to pass to the job. - environment (google.cloud.dataflow_v1beta3.types.RuntimeEnvironment): - The runtime environment for the job. - update (bool): - If set, replace the existing pipeline with - the name specified by jobName with this - pipeline, preserving state. - transform_name_mapping (MutableMapping[str, str]): - Only applicable when updating a pipeline. Map - of transform name prefixes of the job to be - replaced to the corresponding name prefixes of - the new job. - """ - - job_name: str = proto.Field( - proto.STRING, - number=1, - ) - parameters: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - environment: 'RuntimeEnvironment' = proto.Field( - proto.MESSAGE, - number=3, - message='RuntimeEnvironment', - ) - update: bool = proto.Field( - proto.BOOL, - number=4, - ) - transform_name_mapping: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class LaunchTemplateRequest(proto.Message): - r"""A request to launch a template. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - project_id (str): - Required. The ID of the Cloud Platform - project that the job belongs to. 
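GetTemplateRequest and GetTemplateResponse support inspecting a template before launching it. A minimal sketch under the same placeholder assumptions, reading the declared parameters out of the response metadata:

    from google.cloud import dataflow_v1beta3

    def inspect_template_metadata():
        client = dataflow_v1beta3.TemplatesServiceClient()
        request = dataflow_v1beta3.GetTemplateRequest(
            project_id="example-project",
            gcs_path="gs://example-bucket/templates/wordcount",
            view=dataflow_v1beta3.GetTemplateRequest.TemplateView.METADATA_ONLY,
            location="us-central1",
        )
        response = client.get_template(request=request)
        # response.status reports validation problems; response.metadata lists
        # the parameters the template declares.
        print(response.template_type, response.metadata.name)
        for param in response.metadata.parameters:
            print(param.name, "optional" if param.is_optional else "required")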
- validate_only (bool): - If true, the request is validated but not - actually executed. Defaults to false. - gcs_path (str): - A Cloud Storage path to the template from - which to create the job. - Must be a valid Cloud Storage URL, beginning with - 'gs://'. - - This field is a member of `oneof`_ ``template``. - dynamic_template (google.cloud.dataflow_v1beta3.types.DynamicTemplateLaunchParams): - Params for launching a dynamic template. - - This field is a member of `oneof`_ ``template``. - launch_parameters (google.cloud.dataflow_v1beta3.types.LaunchTemplateParameters): - The parameters of the template to launch. - This should be part of the body of the POST - request. - location (str): - The [regional endpoint] - (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) - to which to direct the request. - """ - - project_id: str = proto.Field( - proto.STRING, - number=1, - ) - validate_only: bool = proto.Field( - proto.BOOL, - number=2, - ) - gcs_path: str = proto.Field( - proto.STRING, - number=3, - oneof='template', - ) - dynamic_template: 'DynamicTemplateLaunchParams' = proto.Field( - proto.MESSAGE, - number=6, - oneof='template', - message='DynamicTemplateLaunchParams', - ) - launch_parameters: 'LaunchTemplateParameters' = proto.Field( - proto.MESSAGE, - number=4, - message='LaunchTemplateParameters', - ) - location: str = proto.Field( - proto.STRING, - number=5, - ) - - -class LaunchTemplateResponse(proto.Message): - r"""Response to the request to launch a template. - - Attributes: - job (google.cloud.dataflow_v1beta3.types.Job): - The job that was launched, if the request was - not a dry run and the job was successfully - launched. - """ - - job: jobs.Job = proto.Field( - proto.MESSAGE, - number=1, - message=jobs.Job, - ) - - -class InvalidTemplateParameters(proto.Message): - r"""Used in the error_details field of a google.rpc.Status message, this - indicates problems with the template parameters. - - Attributes: - parameter_violations (MutableSequence[google.cloud.dataflow_v1beta3.types.InvalidTemplateParameters.ParameterViolation]): - Describes all parameter violations in a - template request. - """ - - class ParameterViolation(proto.Message): - r"""A specific template-parameter violation. - - Attributes: - parameter (str): - The parameter that failed to validate. - description (str): - A description of why the parameter failed to - validate. - """ - - parameter: str = proto.Field( - proto.STRING, - number=1, - ) - description: str = proto.Field( - proto.STRING, - number=2, - ) - - parameter_violations: MutableSequence[ParameterViolation] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=ParameterViolation, - ) - - -class DynamicTemplateLaunchParams(proto.Message): - r"""Params which should be passed when launching a dynamic - template. - - Attributes: - gcs_path (str): - Path to the dynamic template spec file on Cloud - Storage. The file must be a JSON-serialized - DynamicTemplateFileSpec object. - staging_location (str): - Cloud Storage path for staging dependencies. Must be a valid - Cloud Storage URL, beginning with ``gs://``.
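LaunchTemplateRequest ties these pieces together for classic templates; a sketch with placeholder values (a dry run returns a LaunchTemplateResponse with no job populated):

    from google.cloud import dataflow_v1beta3

    def launch_template_example():
        client = dataflow_v1beta3.TemplatesServiceClient()
        request = dataflow_v1beta3.LaunchTemplateRequest(
            project_id="example-project",
            location="us-central1",
            gcs_path="gs://example-bucket/templates/wordcount",  # 'template' oneof
            launch_parameters=dataflow_v1beta3.LaunchTemplateParameters(
                job_name="example-launch",
                parameters={"output": "gs://example-bucket/results"},
            ),
            validate_only=False,
        )
        response = client.launch_template(request=request)
        print(response.job.id or "(no job launched)")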
- """ - - gcs_path: str = proto.Field( - proto.STRING, - number=1, - ) - staging_location: str = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta3/mypy.ini b/owl-bot-staging/v1beta3/mypy.ini deleted file mode 100644 index 574c5ae..0000000 --- a/owl-bot-staging/v1beta3/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1beta3/noxfile.py b/owl-bot-staging/v1beta3/noxfile.py deleted file mode 100644 index 2951d2e..0000000 --- a/owl-bot-staging/v1beta3/noxfile.py +++ /dev/null @@ -1,183 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dataflow_v1beta3/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py deleted file mode 100644 index 0a4fc7b..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LaunchFlexTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_launch_flex_template(): - # Create a client - client = dataflow_v1beta3.FlexTemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchFlexTemplateRequest( - ) - - # Make the request - response = await client.launch_flex_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py deleted file mode 100644 index 8f236ed..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LaunchFlexTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_launch_flex_template(): - # Create a client - client = dataflow_v1beta3.FlexTemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchFlexTemplateRequest( - ) - - # Make the request - response = client.launch_flex_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py deleted file mode 100644 index 701ae61..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AggregatedListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_aggregated_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.aggregated_list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py deleted file mode 100644 index 7b9946e..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for AggregatedListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_aggregated_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.aggregated_list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py deleted file mode 100644 index eb9e815..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CheckActiveJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
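The generated AggregatedListJobs samples above leave ListJobsRequest empty; in practice the request is scoped to a project and region. A hedged sketch, assuming the nested Filter enum and page_size field declared on this package's ListJobsRequest:

    from google.cloud import dataflow_v1beta3

    def list_active_jobs():
        client = dataflow_v1beta3.JobsV1Beta3Client()
        request = dataflow_v1beta3.ListJobsRequest(
            project_id="example-project",
            location="us-central1",
            filter=dataflow_v1beta3.ListJobsRequest.Filter.ACTIVE,
            page_size=25,
        )
        # The returned pager fetches subsequent pages transparently.
        for job in client.list_jobs(request=request):
            print(job.id, job.name, job.current_state)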
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_check_active_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CheckActiveJobsRequest( - ) - - # Make the request - response = await client.check_active_jobs(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py deleted file mode 100644 index 591caa0..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CheckActiveJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_check_active_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.CheckActiveJobsRequest( - ) - - # Make the request - response = client.check_active_jobs(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py deleted file mode 100644 index aa6081a..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_create_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobRequest( - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py deleted file mode 100644 index b91dc91..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_create_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobRequest( - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py deleted file mode 100644 index addc3ab..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_get_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobRequest( - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py deleted file mode 100644 index c4bef41..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_get_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobRequest( - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py deleted file mode 100644 index c4084d0..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py deleted file mode 100644 index a975672..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_list_jobs(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobsRequest( - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py deleted file mode 100644 index 7be3e3f..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SnapshotJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_snapshot_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.SnapshotJobRequest( - ) - - # Make the request - response = await client.snapshot_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py deleted file mode 100644 index db5e822..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for SnapshotJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_snapshot_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.SnapshotJobRequest( - ) - - # Make the request - response = client.snapshot_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py deleted file mode 100644 index a5b58a1..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_update_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.UpdateJobRequest( - ) - - # Make the request - response = await client.update_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py deleted file mode 100644 index 9fff7ed..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_update_job(): - # Create a client - client = dataflow_v1beta3.JobsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.UpdateJobRequest( - ) - - # Make the request - response = client.update_job(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py deleted file mode 100644 index 8f449c0..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobMessages
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataflow-client
-
-
-# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataflow_v1beta3
-
-
-async def sample_list_job_messages():
-    # Create a client
-    client = dataflow_v1beta3.MessagesV1Beta3AsyncClient()
-
-    # Initialize request argument(s)
-    request = dataflow_v1beta3.ListJobMessagesRequest(
-    )
-
-    # Make the request (the async paged method must be awaited to get the pager)
-    page_result = await client.list_job_messages(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py
deleted file mode 100644
index 256bde8..0000000
--- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for ListJobMessages
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataflow-client
-
-
-# [START dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
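-#   A hand-written illustration (not generated): an in-range request could
-#   be initialized as follows, using hypothetical placeholder values:
-#
-#       request = dataflow_v1beta3.ListJobMessagesRequest(
-#           project_id="my-project",    # hypothetical
-#           job_id="my-job-id",         # hypothetical
-#           location="us-central1",     # hypothetical
-#       )
-#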
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_list_job_messages(): - # Create a client - client = dataflow_v1beta3.MessagesV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListJobMessagesRequest( - ) - - # Make the request - page_result = client.list_job_messages(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py deleted file mode 100644 index 97150ab..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobExecutionDetails -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
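-#   A hand-written illustration (not generated): the request identifies a
-#   job by project, job ID, and location, all hypothetical here:
-#
-#       request = dataflow_v1beta3.GetJobExecutionDetailsRequest(
-#           project_id="my-project",    # hypothetical
-#           job_id="my-job-id",         # hypothetical
-#           location="us-central1",     # hypothetical
-#       )
-#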
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataflow_v1beta3
-
-
-async def sample_get_job_execution_details():
-    # Create a client
-    client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
-
-    # Initialize request argument(s)
-    request = dataflow_v1beta3.GetJobExecutionDetailsRequest(
-    )
-
-    # Make the request (the async paged method must be awaited to get the pager)
-    page_result = await client.get_job_execution_details(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py
deleted file mode 100644
index 9268495..0000000
--- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetJobExecutionDetails
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataflow-client
-
-
-# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_get_job_execution_details(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobExecutionDetailsRequest( - ) - - # Make the request - page_result = client.get_job_execution_details(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py deleted file mode 100644 index c285799..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobMetrics -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
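-#   A hand-written illustration (not generated): a populated request, with
-#   hypothetical values:
-#
-#       request = dataflow_v1beta3.GetJobMetricsRequest(
-#           project_id="my-project",    # hypothetical
-#           job_id="my-job-id",         # hypothetical
-#           location="us-central1",     # hypothetical
-#       )
-#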
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_get_job_metrics(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobMetricsRequest( - ) - - # Make the request - response = await client.get_job_metrics(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py deleted file mode 100644 index bbe9622..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobMetrics -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_get_job_metrics(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetJobMetricsRequest( - ) - - # Make the request - response = client.get_job_metrics(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py deleted file mode 100644 index 431fc92..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetStageExecutionDetails -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
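-#   A hand-written illustration (not generated): in addition to the job
-#   coordinates, this request names the execution stage to inspect; the
-#   stage ID below is a hypothetical example:
-#
-#       request = dataflow_v1beta3.GetStageExecutionDetailsRequest(
-#           project_id="my-project",    # hypothetical
-#           job_id="my-job-id",         # hypothetical
-#           location="us-central1",     # hypothetical
-#           stage_id="S01",             # hypothetical stage ID
-#       )
-#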
-# - It may require specifying regional endpoints when creating the service
-#   client as shown in:
-#   https://googleapis.dev/python/google-api-core/latest/client_options.html
-from google.cloud import dataflow_v1beta3
-
-
-async def sample_get_stage_execution_details():
-    # Create a client
-    client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
-
-    # Initialize request argument(s)
-    request = dataflow_v1beta3.GetStageExecutionDetailsRequest(
-    )
-
-    # Make the request (the async paged method must be awaited to get the pager)
-    page_result = await client.get_stage_execution_details(request=request)
-
-    # Handle the response
-    async for response in page_result:
-        print(response)
-
-# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async]
diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py
deleted file mode 100644
index c9e9729..0000000
--- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# Generated code. DO NOT EDIT!
-#
-# Snippet for GetStageExecutionDetails
-# NOTE: This snippet has been automatically generated for illustrative purposes only.
-# It may require modifications to work in your environment.
-
-# To install the latest published package dependency, execute the following:
-#   python3 -m pip install google-cloud-dataflow-client
-
-
-# [START dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync]
-# This snippet has been automatically generated and should be regarded as a
-# code template only.
-# It will require modifications to work:
-# - It may require correct/in-range values for request initialization.
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_get_stage_execution_details(): - # Create a client - client = dataflow_v1beta3.MetricsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetStageExecutionDetailsRequest( - ) - - # Make the request - page_result = client.get_stage_execution_details(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py deleted file mode 100644 index a3d83e7..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
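-#   A hand-written illustration (not generated): the snapshot to delete is
-#   addressed by project, snapshot ID, and location (hypothetical values):
-#
-#       request = dataflow_v1beta3.DeleteSnapshotRequest(
-#           project_id="my-project",        # hypothetical
-#           snapshot_id="my-snapshot-id",   # hypothetical
-#           location="us-central1",         # hypothetical
-#       )
-#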
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_delete_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.DeleteSnapshotRequest( - ) - - # Make the request - response = await client.delete_snapshot(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py deleted file mode 100644 index 59a50f5..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_delete_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.DeleteSnapshotRequest( - ) - - # Make the request - response = client.delete_snapshot(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py deleted file mode 100644 index b95e491..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
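-#   A hand-written illustration (not generated): the same addressing scheme
-#   applies when fetching a snapshot (all values hypothetical):
-#
-#       request = dataflow_v1beta3.GetSnapshotRequest(
-#           project_id="my-project",        # hypothetical
-#           snapshot_id="my-snapshot-id",   # hypothetical
-#           location="us-central1",         # hypothetical
-#       )
-#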
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_get_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetSnapshotRequest( - ) - - # Make the request - response = await client.get_snapshot(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py deleted file mode 100644 index f1861f6..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_get_snapshot(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetSnapshotRequest( - ) - - # Make the request - response = client.get_snapshot(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py deleted file mode 100644 index e8303a0..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
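-#   A hand-written illustration (not generated): listing is scoped by
-#   project and location; job_id optionally restricts the results to
-#   snapshots created from one job (all values hypothetical):
-#
-#       request = dataflow_v1beta3.ListSnapshotsRequest(
-#           project_id="my-project",    # hypothetical
-#           job_id="my-job-id",         # hypothetical, optional filter
-#           location="us-central1",     # hypothetical
-#       )
-#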
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_list_snapshots(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3AsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListSnapshotsRequest( - ) - - # Make the request - response = await client.list_snapshots(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py deleted file mode 100644 index bdd2529..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_list_snapshots(): - # Create a client - client = dataflow_v1beta3.SnapshotsV1Beta3Client() - - # Initialize request argument(s) - request = dataflow_v1beta3.ListSnapshotsRequest( - ) - - # Make the request - response = client.list_snapshots(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py deleted file mode 100644 index e21cec8..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobFromTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
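-#   A hand-written illustration (not generated): besides the required
-#   gcs_path to a template staged in Cloud Storage, a realistic request
-#   names the job and its location (all values hypothetical):
-#
-#       request = dataflow_v1beta3.CreateJobFromTemplateRequest(
-#           project_id="my-project",                          # hypothetical
-#           job_name="my-job",                                # hypothetical
-#           gcs_path="gs://my-bucket/templates/my_template",  # hypothetical
-#           location="us-central1",                           # hypothetical
-#       )
-#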
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_create_job_from_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobFromTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = await client.create_job_from_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py deleted file mode 100644 index 175c180..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobFromTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_create_job_from_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.CreateJobFromTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = client.create_job_from_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py deleted file mode 100644 index 8760665..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
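-#   A hand-written illustration (not generated): the template is addressed
-#   by a Cloud Storage path within a project and location (hypothetical
-#   values):
-#
-#       request = dataflow_v1beta3.GetTemplateRequest(
-#           project_id="my-project",                          # hypothetical
-#           gcs_path="gs://my-bucket/templates/my_template",  # hypothetical
-#           location="us-central1",                           # hypothetical
-#       )
-#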
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_get_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = await client.get_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py deleted file mode 100644 index c7f9cae..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_get_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_get_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.GetTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = client.get_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py deleted file mode 100644 index 5d00450..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LaunchTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
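-#   A hand-written illustration (not generated): a launch names the stored
-#   template and supplies LaunchTemplateParameters such as the job name
-#   (all values hypothetical):
-#
-#       request = dataflow_v1beta3.LaunchTemplateRequest(
-#           project_id="my-project",                          # hypothetical
-#           gcs_path="gs://my-bucket/templates/my_template",  # hypothetical
-#           location="us-central1",                           # hypothetical
-#           launch_parameters=dataflow_v1beta3.LaunchTemplateParameters(
-#               job_name="my-job",                            # hypothetical
-#           ),
-#       )
-#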
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -async def sample_launch_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceAsyncClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = await client.launch_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py b/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py deleted file mode 100644 index 7f1f81d..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/dataflow_v1beta3_generated_templates_service_launch_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for LaunchTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-dataflow-client - - -# [START dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud import dataflow_v1beta3 - - -def sample_launch_template(): - # Create a client - client = dataflow_v1beta3.TemplatesServiceClient() - - # Initialize request argument(s) - request = dataflow_v1beta3.LaunchTemplateRequest( - gcs_path="gcs_path_value", - ) - - # Make the request - response = client.launch_template(request=request) - - # Handle the response - print(response) - -# [END dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync] diff --git a/owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json b/owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json deleted file mode 100644 index 07ae5fa..0000000 --- a/owl-bot-staging/v1beta3/samples/generated_samples/snippet_metadata_google.dataflow.v1beta3.json +++ /dev/null @@ -1,2769 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.dataflow.v1beta3", - "version": "v1beta3" - } - ], - "language": "PYTHON", - "name": "google-cloud-dataflow-client", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceAsyncClient", - "shortName": "FlexTemplatesServiceAsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceAsyncClient.launch_flex_template", - "method": { - "fullName": "google.dataflow.v1beta3.FlexTemplatesService.LaunchFlexTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.FlexTemplatesService", - "shortName": "FlexTemplatesService" - }, - "shortName": "LaunchFlexTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse", - "shortName": "launch_flex_template" - }, - "description": "Sample for LaunchFlexTemplate", - "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient", - "shortName": "FlexTemplatesServiceClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.FlexTemplatesServiceClient.launch_flex_template", - "method": { - "fullName": "google.dataflow.v1beta3.FlexTemplatesService.LaunchFlexTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.FlexTemplatesService", - "shortName": "FlexTemplatesService" - }, - "shortName": 
"LaunchFlexTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.LaunchFlexTemplateResponse", - "shortName": "launch_flex_template" - }, - "description": "Sample for LaunchFlexTemplate", - "file": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_FlexTemplatesService_LaunchFlexTemplate_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_flex_templates_service_launch_flex_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.aggregated_list_jobs", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "AggregatedListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsAsyncPager", - "shortName": "aggregated_list_jobs" - }, - "description": "Sample for AggregatedListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.aggregated_list_jobs", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.AggregatedListJobs", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "AggregatedListJobs" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataflow_v1beta3.types.ListJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.AggregatedListJobsPager", - "shortName": "aggregated_list_jobs" - }, - "description": "Sample for AggregatedListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_AggregatedListJobs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_aggregated_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.check_active_jobs", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "CheckActiveJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", - "shortName": "check_active_jobs" - }, - "description": "Sample for CheckActiveJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.check_active_jobs", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CheckActiveJobs", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "CheckActiveJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.CheckActiveJobsResponse", - "shortName": "check_active_jobs" - }, - "description": "Sample for CheckActiveJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CheckActiveJobs_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_check_active_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.create_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.create_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.CreateJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_CreateJob_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_create_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.get_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.get_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.GetJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_GetJob_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 
48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.list_jobs", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.list_jobs", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.ListJobs", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_ListJobs_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_list_jobs_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": 
"google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.snapshot_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "SnapshotJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "snapshot_job" - }, - "description": "Sample for SnapshotJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.snapshot_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.SnapshotJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "SnapshotJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.SnapshotJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "snapshot_job" - }, - "description": "Sample for SnapshotJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_SnapshotJob_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_snapshot_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient", - "shortName": "JobsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3AsyncClient.update_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "UpdateJob" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.dataflow_v1beta3.types.UpdateJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "update_job" - }, - "description": "Sample for UpdateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client", - "shortName": "JobsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.JobsV1Beta3Client.update_job", - "method": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3.UpdateJob", - "service": { - "fullName": "google.dataflow.v1beta3.JobsV1Beta3", - "shortName": "JobsV1Beta3" - }, - "shortName": "UpdateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.UpdateJobRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "update_job" - }, - "description": "Sample for UpdateJob", - "file": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_JobsV1Beta3_UpdateJob_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_jobs_v1_beta3_update_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient", - "shortName": "MessagesV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3AsyncClient.list_job_messages", - "method": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", - "service": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", - "shortName": "MessagesV1Beta3" - }, - "shortName": "ListJobMessages" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesAsyncPager", - "shortName": "list_job_messages" - }, - "description": "Sample for ListJobMessages", - "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client", - "shortName": "MessagesV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.MessagesV1Beta3Client.list_job_messages", - "method": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3.ListJobMessages", - "service": { - "fullName": "google.dataflow.v1beta3.MessagesV1Beta3", - "shortName": "MessagesV1Beta3" - }, - "shortName": "ListJobMessages" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListJobMessagesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.messages_v1_beta3.pagers.ListJobMessagesPager", - "shortName": "list_job_messages" - }, - "description": "Sample for ListJobMessages", - "file": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MessagesV1Beta3_ListJobMessages_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_messages_v1_beta3_list_job_messages_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_execution_details", - "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", - "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" - }, - "shortName": "GetJobExecutionDetails" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsAsyncPager", - "shortName": "get_job_execution_details" - }, - "description": "Sample for GetJobExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_execution_details", - "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobExecutionDetails", - "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" - }, - "shortName": "GetJobExecutionDetails" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobExecutionDetailsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetJobExecutionDetailsPager", - "shortName": "get_job_execution_details" - }, - "description": "Sample for GetJobExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobExecutionDetails_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_execution_details_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_job_metrics", - "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", - "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" - }, - "shortName": "GetJobMetrics" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.cloud.dataflow_v1beta3.types.JobMetrics", - "shortName": "get_job_metrics" - }, - "description": "Sample for GetJobMetrics", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_job_metrics", - "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetJobMetrics", - "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" - }, - "shortName": "GetJobMetrics" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetJobMetricsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.JobMetrics", - "shortName": "get_job_metrics" - }, - "description": "Sample for GetJobMetrics", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetJobMetrics_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_job_metrics_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient", - "shortName": "MetricsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3AsyncClient.get_stage_execution_details", - "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", - "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" - }, - "shortName": "GetStageExecutionDetails" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsAsyncPager", - "shortName": "get_stage_execution_details" - }, - "description": "Sample for 
GetStageExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client", - "shortName": "MetricsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.MetricsV1Beta3Client.get_stage_execution_details", - "method": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3.GetStageExecutionDetails", - "service": { - "fullName": "google.dataflow.v1beta3.MetricsV1Beta3", - "shortName": "MetricsV1Beta3" - }, - "shortName": "GetStageExecutionDetails" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetStageExecutionDetailsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.pagers.GetStageExecutionDetailsPager", - "shortName": "get_stage_execution_details" - }, - "description": "Sample for GetStageExecutionDetails", - "file": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_MetricsV1Beta3_GetStageExecutionDetails_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_metrics_v1_beta3_get_stage_execution_details_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.delete_snapshot", - "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", - "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" - }, - "shortName": "DeleteSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", - "shortName": "delete_snapshot" - }, - "description": "Sample for 
DeleteSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - "shortName": "SnapshotsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.delete_snapshot", - "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.DeleteSnapshot", - "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" - }, - "shortName": "DeleteSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.DeleteSnapshotResponse", - "shortName": "delete_snapshot" - }, - "description": "Sample for DeleteSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_DeleteSnapshot_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_delete_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.get_snapshot", - "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", - "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" - }, - "shortName": "GetSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "get_snapshot" - }, - "description": "Sample for GetSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - "shortName": "SnapshotsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.get_snapshot", - "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.GetSnapshot", - "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" - }, - "shortName": "GetSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Snapshot", - "shortName": "get_snapshot" - }, - "description": "Sample for GetSnapshot", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_GetSnapshot_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_get_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient", - "shortName": "SnapshotsV1Beta3AsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3AsyncClient.list_snapshots", - "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", - "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" - }, - "shortName": "ListSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", - "shortName": "list_snapshots" - }, - "description": "Sample for ListSnapshots", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 
38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client", - "shortName": "SnapshotsV1Beta3Client" - }, - "fullName": "google.cloud.dataflow_v1beta3.SnapshotsV1Beta3Client.list_snapshots", - "method": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3.ListSnapshots", - "service": { - "fullName": "google.dataflow.v1beta3.SnapshotsV1Beta3", - "shortName": "SnapshotsV1Beta3" - }, - "shortName": "ListSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.ListSnapshotsRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.ListSnapshotsResponse", - "shortName": "list_snapshots" - }, - "description": "Sample for ListSnapshots", - "file": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_SnapshotsV1Beta3_ListSnapshots_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_snapshots_v1_beta3_list_snapshots_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.create_job_from_template", - "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.TemplatesService", - "shortName": "TemplatesService" - }, - "shortName": "CreateJobFromTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job_from_template" - }, - "description": "Sample for CreateJobFromTemplate", - "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - 
"start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", - "shortName": "TemplatesServiceClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.create_job_from_template", - "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.CreateJobFromTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.TemplatesService", - "shortName": "TemplatesService" - }, - "shortName": "CreateJobFromTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.CreateJobFromTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.Job", - "shortName": "create_job_from_template" - }, - "description": "Sample for CreateJobFromTemplate", - "file": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_CreateJobFromTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_templates_service_create_job_from_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.get_template", - "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.TemplatesService", - "shortName": "TemplatesService" - }, - "shortName": "GetTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", - "shortName": "get_template" - }, - "description": "Sample for GetTemplate", - "file": "dataflow_v1beta3_generated_templates_service_get_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"dataflow_v1beta3_generated_templates_service_get_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient", - "shortName": "TemplatesServiceClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.get_template", - "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.GetTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.TemplatesService", - "shortName": "TemplatesService" - }, - "shortName": "GetTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.GetTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.GetTemplateResponse", - "shortName": "get_template" - }, - "description": "Sample for GetTemplate", - "file": "dataflow_v1beta3_generated_templates_service_get_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_GetTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_templates_service_get_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient", - "shortName": "TemplatesServiceAsyncClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceAsyncClient.launch_template", - "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.TemplatesService", - "shortName": "TemplatesService" - }, - "shortName": "LaunchTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse", - "shortName": "launch_template" - }, - "description": "Sample for LaunchTemplate", - "file": "dataflow_v1beta3_generated_templates_service_launch_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_templates_service_launch_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": 
"google.cloud.dataflow_v1beta3.TemplatesServiceClient", - "shortName": "TemplatesServiceClient" - }, - "fullName": "google.cloud.dataflow_v1beta3.TemplatesServiceClient.launch_template", - "method": { - "fullName": "google.dataflow.v1beta3.TemplatesService.LaunchTemplate", - "service": { - "fullName": "google.dataflow.v1beta3.TemplatesService", - "shortName": "TemplatesService" - }, - "shortName": "LaunchTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.dataflow_v1beta3.types.LaunchTemplateRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.dataflow_v1beta3.types.LaunchTemplateResponse", - "shortName": "launch_template" - }, - "description": "Sample for LaunchTemplate", - "file": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "dataflow_v1beta3_generated_TemplatesService_LaunchTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "dataflow_v1beta3_generated_templates_service_launch_template_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py b/owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py deleted file mode 100644 index 8afa679..0000000 --- a/owl-bot-staging/v1beta3/scripts/fixup_dataflow_v1beta3_keywords.py +++ /dev/null @@ -1,193 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dataflowCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'aggregated_list_jobs': ('filter', 'project_id', 'view', 'page_size', 'page_token', 'location', ), - 'check_active_jobs': ('project_id', ), - 'create_job': ('project_id', 'job', 'view', 'replace_job_id', 'location', ), - 'create_job_from_template': ('project_id', 'job_name', 'gcs_path', 'parameters', 'environment', 'location', ), - 'delete_snapshot': ('project_id', 'snapshot_id', 'location', ), - 'get_job': ('project_id', 'job_id', 'view', 'location', ), - 'get_job_execution_details': ('project_id', 'job_id', 'location', 'page_size', 'page_token', ), - 'get_job_metrics': ('project_id', 'job_id', 'start_time', 'location', ), - 'get_snapshot': ('project_id', 'snapshot_id', 'location', ), - 'get_stage_execution_details': ('project_id', 'job_id', 'location', 'stage_id', 'page_size', 'page_token', 'start_time', 'end_time', ), - 'get_template': ('project_id', 'gcs_path', 'view', 'location', ), - 'launch_flex_template': ('project_id', 'launch_parameter', 'location', 'validate_only', ), - 'launch_template': ('project_id', 'validate_only', 'gcs_path', 'dynamic_template', 'launch_parameters', 'location', ), - 'list_job_messages': ('project_id', 'job_id', 'minimum_importance', 'page_size', 'page_token', 'start_time', 'end_time', 'location', ), - 'list_jobs': ('filter', 'project_id', 'view', 'page_size', 'page_token', 'location', ), - 'list_snapshots': ('project_id', 'job_id', 'location', ), - 'snapshot_job': ('project_id', 'job_id', 'ttl', 'location', 'snapshot_sources', 'description', ), - 'update_job': ('project_id', 'job_id', 'job', 'location', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dataflowCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dataflow client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta3/setup.py b/owl-bot-staging/v1beta3/setup.py deleted file mode 100644 index e5f5b42..0000000 --- a/owl-bot-staging/v1beta3/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
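The fixup script deleted above exists because newer generated clients take a single request message instead of flattened positional parameters; its leave_Call visitor folds the old positional and keyword arguments into a request dict (keyed by METHOD_TO_PARAMS) while passing the retry/timeout/metadata control parameters through unchanged. A minimal before/after sketch, assuming the deleted script is importable under its filename (the call values are made up):

    # Run the deleted transformer over a one-line module to see the rewrite.
    import libcst as cst
    from fixup_dataflow_v1beta3_keywords import dataflowCallTransformer

    src = "client.get_job('my-project', 'job-123', view=1, timeout=30.0)\n"
    fixed = cst.parse_module(src).visit(dataflowCallTransformer())
    print(fixed.code)
    # Prints, modulo whitespace:
    # client.get_job(request={'project_id': 'my-project',
    #                         'job_id': 'job-123', 'view': 1}, timeout=30.0)

In bulk, the same rewrite is applied by the script's own CLI, e.g. python fixup_dataflow_v1beta3_keywords.py --input-directory ./src --output-directory ./fixed-src (both directories here are placeholders).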
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-dataflow-client' - - -description = "Google Cloud Dataflow Client API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/dataflow/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-dataflow-client" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.10.txt b/owl-bot-staging/v1beta3/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1beta3/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.11.txt b/owl-bot-staging/v1beta3/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1beta3/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. 
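One detail worth noting in the deleted setup.py: the version is obtained by exec-ing gapic_version.py into a dict rather than importing the package, so setup.py can run before the package and its grpc dependencies are installed; the Development Status classifier is then derived from whether the major version is 0. The pattern in isolation (the literal version string below is a placeholder, not the package's actual version):

    # gapic_version.py is assumed to contain only:  __version__ = "0.0.0"
    version: dict = {}
    with open("google/cloud/dataflow/gapic_version.py") as fp:
        exec(fp.read(), version)  # no import, so nothing has to be installed yet
    release_status = ("Development Status :: 4 - Beta"
                      if version["__version__"][0] == "0"
                      else "Development Status :: 5 - Production/Stable")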
-google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.7.txt b/owl-bot-staging/v1beta3/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adf..0000000 --- a/owl-bot-staging/v1beta3/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.8.txt b/owl-bot-staging/v1beta3/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1beta3/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta3/testing/constraints-3.9.txt b/owl-bot-staging/v1beta3/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1beta3/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1beta3/tests/__init__.py b/owl-bot-staging/v1beta3/tests/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1beta3/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta3/tests/unit/__init__.py b/owl-bot-staging/v1beta3/tests/unit/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py deleted file mode 100644 index c696e63..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_flex_templates_service.py +++ /dev/null @@ -1,1459 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataflow_v1beta3.services.flex_templates_service import FlexTemplatesServiceAsyncClient -from google.cloud.dataflow_v1beta3.services.flex_templates_service import FlexTemplatesServiceClient -from google.cloud.dataflow_v1beta3.services.flex_templates_service import transports -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
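-# Illustrative mapping (a sketch of the behavior under test, not an exhaustive
-# spec): the mTLS endpoint is derived by inserting an "mtls" label after the
-# service name, e.g.
-#     dataflow.googleapis.com        -> dataflow.mtls.googleapis.com
-#     example.sandbox.googleapis.com -> example.mtls.sandbox.googleapis.com
-# while hosts outside googleapis.com are returned unchanged, as
-# test__get_default_mtls_endpoint asserts below.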
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert FlexTemplatesServiceClient._get_default_mtls_endpoint(None) is None - assert FlexTemplatesServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert FlexTemplatesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert FlexTemplatesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert FlexTemplatesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert FlexTemplatesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (FlexTemplatesServiceClient, "grpc"), - (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), - (FlexTemplatesServiceClient, "rest"), -]) -def test_flex_templates_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.FlexTemplatesServiceGrpcTransport, "grpc"), - (transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.FlexTemplatesServiceRestTransport, "rest"), -]) -def test_flex_templates_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (FlexTemplatesServiceClient, "grpc"), - (FlexTemplatesServiceAsyncClient, "grpc_asyncio"), - (FlexTemplatesServiceClient, "rest"), -]) -def test_flex_templates_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert 
client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -def test_flex_templates_service_client_get_transport_class(): - transport = FlexTemplatesServiceClient.get_transport_class() - available_transports = [ - transports.FlexTemplatesServiceGrpcTransport, - transports.FlexTemplatesServiceRestTransport, - ] - assert transport in available_transports - - transport = FlexTemplatesServiceClient.get_transport_class("grpc") - assert transport == transports.FlexTemplatesServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc"), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest"), -]) -@mock.patch.object(FlexTemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceClient)) -@mock.patch.object(FlexTemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceAsyncClient)) -def test_flex_templates_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(FlexTemplatesServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(FlexTemplatesServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
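-    # (Summary of the GOOGLE_API_USE_MTLS_ENDPOINT values exercised below:
-    # "never" forces the plain endpoint, "always" forces the mTLS endpoint,
-    # "auto" picks the mTLS endpoint only when a client certificate is
-    # available, and any other value raises MutualTLSChannelError.)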
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", "true"), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", "false"), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest", "true"), - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(FlexTemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceClient)) -@mock.patch.object(FlexTemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_flex_templates_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
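-    # (This third case completes the matrix: an explicit client_cert_source in
-    # the options, a default client certificate discovered via ADC, and no
-    # certificate at all; the mTLS endpoint is selected only when a source
-    # exists and GOOGLE_API_USE_CLIENT_CERTIFICATE permits it.)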
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - FlexTemplatesServiceClient, FlexTemplatesServiceAsyncClient -]) -@mock.patch.object(FlexTemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceClient)) -@mock.patch.object(FlexTemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(FlexTemplatesServiceAsyncClient)) -def test_flex_templates_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc"), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest"), -]) -def test_flex_templates_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", grpc_helpers), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceRestTransport, "rest", None), -]) -def test_flex_templates_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
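-    # Illustrative user-side equivalent of what this test mocks (assumes
-    # "credentials.json" is a valid service account key on disk):
-    #     options = client_options.ClientOptions(credentials_file="credentials.json")
-    #     client = FlexTemplatesServiceClient(client_options=options)
-    # The option is forwarded to the transport, which loads the file through
-    # google.auth.load_credentials_from_file.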
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_flex_templates_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = FlexTemplatesServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport, "grpc", grpc_helpers), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_flex_templates_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=None, - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - templates.LaunchFlexTemplateRequest, - dict, -]) -def test_launch_flex_template(request_type, transport: str = 'grpc'): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.launch_flex_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = templates.LaunchFlexTemplateResponse( - ) - response = client.launch_flex_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == templates.LaunchFlexTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, templates.LaunchFlexTemplateResponse) - - -def test_launch_flex_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.launch_flex_template), - '__call__') as call: - client.launch_flex_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == templates.LaunchFlexTemplateRequest() - -@pytest.mark.asyncio -async def test_launch_flex_template_async(transport: str = 'grpc_asyncio', request_type=templates.LaunchFlexTemplateRequest): - client = FlexTemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.launch_flex_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
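-        # (FakeUnaryUnaryCall wraps the canned response in an awaitable fake
-        # gRPC call, so the async client can await the mocked stub method.)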
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchFlexTemplateResponse( - )) - response = await client.launch_flex_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == templates.LaunchFlexTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, templates.LaunchFlexTemplateResponse) - - -@pytest.mark.asyncio -async def test_launch_flex_template_async_from_dict(): - await test_launch_flex_template_async(request_type=dict) - - -def test_launch_flex_template_field_headers(): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = templates.LaunchFlexTemplateRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.launch_flex_template), - '__call__') as call: - call.return_value = templates.LaunchFlexTemplateResponse() - client.launch_flex_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_launch_flex_template_field_headers_async(): - client = FlexTemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = templates.LaunchFlexTemplateRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.launch_flex_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchFlexTemplateResponse()) - await client.launch_flex_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - templates.LaunchFlexTemplateRequest, - dict, -]) -def test_launch_flex_template_rest(request_type): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
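-        # (The REST tests fake a server reply by serializing the proto with
-        # json_format.MessageToJson and placing the JSON on a requests.Response,
-        # mirroring what the REST transport receives over HTTP.)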
- return_value = templates.LaunchFlexTemplateResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = templates.LaunchFlexTemplateResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.launch_flex_template(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, templates.LaunchFlexTemplateResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_launch_flex_template_rest_interceptors(null_interceptor): - transport = transports.FlexTemplatesServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.FlexTemplatesServiceRestInterceptor(), - ) - client = FlexTemplatesServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.FlexTemplatesServiceRestInterceptor, "post_launch_flex_template") as post, \ - mock.patch.object(transports.FlexTemplatesServiceRestInterceptor, "pre_launch_flex_template") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = templates.LaunchFlexTemplateRequest.pb(templates.LaunchFlexTemplateRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = templates.LaunchFlexTemplateResponse.to_json(templates.LaunchFlexTemplateResponse()) - - request = templates.LaunchFlexTemplateRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = templates.LaunchFlexTemplateResponse() - - client.launch_flex_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_launch_flex_template_rest_bad_request(transport: str = 'rest', request_type=templates.LaunchFlexTemplateRequest): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.launch_flex_template(request) - - -def test_launch_flex_template_rest_error(): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.FlexTemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.FlexTemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FlexTemplatesServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.FlexTemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FlexTemplatesServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = FlexTemplatesServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.FlexTemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = FlexTemplatesServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.FlexTemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = FlexTemplatesServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.FlexTemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.FlexTemplatesServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.FlexTemplatesServiceGrpcTransport, - transports.FlexTemplatesServiceGrpcAsyncIOTransport, - transports.FlexTemplatesServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = FlexTemplatesServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.FlexTemplatesServiceGrpcTransport, - ) - -def test_flex_templates_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.FlexTemplatesServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_flex_templates_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.FlexTemplatesServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'launch_flex_template', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_flex_templates_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FlexTemplatesServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id="octopus", - ) - - -def test_flex_templates_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.flex_templates_service.transports.FlexTemplatesServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FlexTemplatesServiceTransport() - adc.assert_called_once() - - -def test_flex_templates_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
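-    # (google.auth.default() resolves ADC by consulting the
-    # GOOGLE_APPLICATION_CREDENTIALS environment variable, the gcloud ADC file,
-    # and finally the GCE metadata server; the mock below stubs that lookup out.)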
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FlexTemplatesServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FlexTemplatesServiceGrpcTransport, - transports.FlexTemplatesServiceGrpcAsyncIOTransport, - ], -) -def test_flex_templates_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.FlexTemplatesServiceGrpcTransport, - transports.FlexTemplatesServiceGrpcAsyncIOTransport, - transports.FlexTemplatesServiceRestTransport, - ], -) -def test_flex_templates_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.FlexTemplatesServiceGrpcTransport, grpc_helpers), - (transports.FlexTemplatesServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_flex_templates_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=["1", "2"], - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport]) -def test_flex_templates_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
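-    # (client_cert_source_for_mtls is a callable returning a
-    # (cert_bytes, key_bytes) pair; the transport passes that pair to
-    # grpc.ssl_channel_credentials, as asserted below.)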
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_flex_templates_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.FlexTemplatesServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_flex_templates_service_host_no_port(transport_name): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataflow.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_flex_templates_service_host_with_port(transport_name): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataflow.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataflow.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_flex_templates_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = FlexTemplatesServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = FlexTemplatesServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.launch_flex_template._session - session2 = client2.transport.launch_flex_template._session - assert session1 != session2 -def test_flex_templates_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.FlexTemplatesServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_flex_templates_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.FlexTemplatesServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport]) -def test_flex_templates_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.FlexTemplatesServiceGrpcTransport, transports.FlexTemplatesServiceGrpcAsyncIOTransport]) -def test_flex_templates_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = FlexTemplatesServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = FlexTemplatesServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = FlexTemplatesServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = FlexTemplatesServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = FlexTemplatesServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = FlexTemplatesServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = FlexTemplatesServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = FlexTemplatesServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = FlexTemplatesServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = FlexTemplatesServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = FlexTemplatesServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = FlexTemplatesServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = FlexTemplatesServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = FlexTemplatesServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = FlexTemplatesServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.FlexTemplatesServiceTransport, '_prep_wrapped_messages') as prep: - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.FlexTemplatesServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = FlexTemplatesServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = FlexTemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = FlexTemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (FlexTemplatesServiceClient, transports.FlexTemplatesServiceGrpcTransport), - (FlexTemplatesServiceAsyncClient, transports.FlexTemplatesServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py deleted file mode 100644 index fd61443..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_jobs_v1_beta3.py +++ /dev/null @@ -1,3644 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
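Closing out the flex-templates client tests: the API-key path asserted in test_api_key_credentials above corresponds, on the caller's side, to the sketch below. This is illustrative only; it assumes a google-api-core version whose ClientOptions accepts api_key and that the generated package exports the client at its root, and the key string is a placeholder.

    from google.api_core.client_options import ClientOptions
    from google.cloud.dataflow_v1beta3 import FlexTemplatesServiceClient

    # Supply an API key instead of ADC credentials; per the test above, the
    # client resolves it through google.auth's get_api_key_credentials.
    options = ClientOptions(api_key="my-api-key")  # placeholder value
    client = FlexTemplatesServiceClient(client_options=options)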
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import JobsV1Beta3Client -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import transports -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import snapshots -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import struct_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert JobsV1Beta3Client._get_default_mtls_endpoint(None) is None - assert JobsV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert JobsV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert JobsV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert JobsV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert JobsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (JobsV1Beta3Client, "grpc"), - (JobsV1Beta3AsyncClient, "grpc_asyncio"), - (JobsV1Beta3Client, "rest"), -]) -def test_jobs_v1_beta3_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.JobsV1Beta3GrpcTransport, "grpc"), - (transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.JobsV1Beta3RestTransport, "rest"), -]) -def test_jobs_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (JobsV1Beta3Client, "grpc"), - (JobsV1Beta3AsyncClient, "grpc_asyncio"), - (JobsV1Beta3Client, "rest"), -]) -def test_jobs_v1_beta3_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if 
transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -def test_jobs_v1_beta3_client_get_transport_class(): - transport = JobsV1Beta3Client.get_transport_class() - available_transports = [ - transports.JobsV1Beta3GrpcTransport, - transports.JobsV1Beta3RestTransport, - ] - assert transport in available_transports - - transport = JobsV1Beta3Client.get_transport_class("grpc") - assert transport == transports.JobsV1Beta3GrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc"), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), -]) -@mock.patch.object(JobsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3Client)) -@mock.patch.object(JobsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3AsyncClient)) -def test_jobs_v1_beta3_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(JobsV1Beta3Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(JobsV1Beta3Client, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", "true"), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", "false"), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "true"), - (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", "false"), -]) -@mock.patch.object(JobsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3Client)) -@mock.patch.object(JobsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_jobs_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - JobsV1Beta3Client, JobsV1Beta3AsyncClient -]) -@mock.patch.object(JobsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3Client)) -@mock.patch.object(JobsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(JobsV1Beta3AsyncClient)) -def test_jobs_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc"), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest"), -]) -def test_jobs_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", grpc_helpers), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (JobsV1Beta3Client, transports.JobsV1Beta3RestTransport, "rest", None), -]) -def test_jobs_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_jobs_v1_beta3_client_client_options_from_dict(): - with mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = JobsV1Beta3Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport, "grpc", grpc_helpers), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_jobs_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=None, - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.CreateJobRequest, - dict, -]) -def test_create_job(request_type, transport: str = 'grpc'): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -def test_create_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work.
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        client.create_job()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.CreateJobRequest()
-
-@pytest.mark.asyncio
-async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=jobs.CreateJobRequest):
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job(
-            id='id_value',
-            project_id='project_id_value',
-            name='name_value',
-            type_=environment.JobType.JOB_TYPE_BATCH,
-            steps_location='steps_location_value',
-            current_state=jobs.JobState.JOB_STATE_STOPPED,
-            requested_state=jobs.JobState.JOB_STATE_STOPPED,
-            replace_job_id='replace_job_id_value',
-            client_request_id='client_request_id_value',
-            replaced_by_job_id='replaced_by_job_id_value',
-            temp_files=['temp_files_value'],
-            location='location_value',
-            created_from_snapshot_id='created_from_snapshot_id_value',
-            satisfies_pzs=True,
-        ))
-        response = await client.create_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.CreateJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.Job)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.name == 'name_value'
-    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
-    assert response.steps_location == 'steps_location_value'
-    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.replace_job_id == 'replace_job_id_value'
-    assert response.client_request_id == 'client_request_id_value'
-    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
-    assert response.temp_files == ['temp_files_value']
-    assert response.location == 'location_value'
-    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
-    assert response.satisfies_pzs is True
-
-
-@pytest.mark.asyncio
-async def test_create_job_async_from_dict():
-    await test_create_job_async(request_type=dict)
-
-
-def test_create_job_field_headers():
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.CreateJobRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = jobs.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.CreateJobRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - jobs.GetJobRequest, - dict, -]) -def test_get_job(request_type, transport: str = 'grpc'): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -def test_get_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() - -@pytest.mark.asyncio -async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=jobs.GetJobRequest): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - )) - response = await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.GetJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -@pytest.mark.asyncio -async def test_get_job_async_from_dict(): - await test_get_job_async(request_type=dict) - - -def test_get_job_field_headers(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.GetJobRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = jobs.Job() - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.GetJobRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - jobs.UpdateJobRequest, - dict, -]) -def test_update_job(request_type, transport: str = 'grpc'): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - ) - response = client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -def test_update_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - client.update_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() - -@pytest.mark.asyncio -async def test_update_job_async(transport: str = 'grpc_asyncio', request_type=jobs.UpdateJobRequest): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - )) - response = await client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.UpdateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -@pytest.mark.asyncio -async def test_update_job_async_from_dict(): - await test_update_job_async(request_type=dict) - - -def test_update_job_field_headers(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.UpdateJobRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - call.return_value = jobs.Job() - client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_field_headers_async(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.UpdateJobRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) - await client.update_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - jobs.ListJobsRequest, - dict, -]) -def test_list_jobs(request_type, transport: str = 'grpc'): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.ListJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.ListJobsRequest): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == jobs.ListJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - - -def test_list_jobs_field_headers(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.ListJobsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = jobs.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = jobs.ListJobsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse()) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project_id', ''), - ('location', ''), - )), - ) - pager = client.list_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, jobs.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, jobs.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_jobs(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - jobs.ListJobsRequest, - dict, -]) -def test_aggregated_list_jobs(request_type, transport: str = 'grpc'): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.aggregated_list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. 
-        call.return_value = jobs.ListJobsResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.aggregated_list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.ListJobsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.AggregatedListJobsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_aggregated_list_jobs_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__') as call:
-        client.aggregated_list_jobs()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.ListJobsRequest()
-
-
-@pytest.mark.asyncio
-async def test_aggregated_list_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.ListJobsRequest):
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.aggregated_list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.ListJobsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.AggregatedListJobsAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_aggregated_list_jobs_async_from_dict():
-    await test_aggregated_list_jobs_async(request_type=dict)
-
-
-def test_aggregated_list_jobs_field_headers():
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.ListJobsRequest()
-
-    request.project_id = 'project_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__') as call:
-        call.return_value = jobs.ListJobsResponse()
-        client.aggregated_list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
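-    # For reference, the expected routing header is assembled roughly like
-    # this (an illustrative sketch, not the generated client code itself):
-    #
-    #     gapic_v1.routing_header.to_grpc_metadata(
-    #         (('project_id', request.project_id),)
-    #     )
-    #     # -> ('x-goog-request-params', 'project_id=project_id_value')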
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_aggregated_list_jobs_field_headers_async():
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.ListJobsRequest()
-
-    request.project_id = 'project_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.ListJobsResponse())
-        await client.aggregated_list_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value',
-    ) in kw['metadata']
-
-
-def test_aggregated_list_jobs_pager(transport_name: str = "grpc"):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-                next_page_token='abc',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                ],
-                next_page_token='ghi',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('project_id', ''),
-            )),
-        )
-        pager = client.aggregated_list_jobs(request={})
-
-        assert pager._metadata == metadata
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, jobs.Job)
-                   for i in results)
-
-
-def test_aggregated_list_jobs_pages(transport_name: str = "grpc"):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-                next_page_token='abc',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                ],
-                next_page_token='ghi',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.aggregated_list_jobs(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-@pytest.mark.asyncio
-async def test_aggregated_list_jobs_async_pager():
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-                next_page_token='abc',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                ],
-                next_page_token='ghi',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.aggregated_list_jobs(request={})
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager: # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, jobs.Job)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_aggregated_list_jobs_async_pages():
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.aggregated_list_jobs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-                next_page_token='abc',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                ],
-                next_page_token='ghi',
-            ),
-            jobs.ListJobsResponse(
-                jobs=[
-                    jobs.Job(),
-                    jobs.Job(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        async for page_ in (await client.aggregated_list_jobs(request={})).pages: # pragma: no branch
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-
-@pytest.mark.parametrize("request_type", [
-  jobs.CheckActiveJobsRequest,
-  dict,
-])
-def test_check_active_jobs(request_type, transport: str = 'grpc'):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_active_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = jobs.CheckActiveJobsResponse(
-            active_jobs_exist=True,
-        )
-        response = client.check_active_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.CheckActiveJobsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.CheckActiveJobsResponse)
-    assert response.active_jobs_exist is True
-
-
-def test_check_active_jobs_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_active_jobs),
-            '__call__') as call:
-        client.check_active_jobs()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.CheckActiveJobsRequest()
-
-
-@pytest.mark.asyncio
-async def test_check_active_jobs_async(transport: str = 'grpc_asyncio', request_type=jobs.CheckActiveJobsRequest):
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.check_active_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.CheckActiveJobsResponse(
-            active_jobs_exist=True,
-        ))
-        response = await client.check_active_jobs(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.CheckActiveJobsRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.CheckActiveJobsResponse)
-    assert response.active_jobs_exist is True
-
-
-@pytest.mark.asyncio
-async def test_check_active_jobs_async_from_dict():
-    await test_check_active_jobs_async(request_type=dict)
-
-
-@pytest.mark.parametrize("request_type", [
-  jobs.SnapshotJobRequest,
-  dict,
-])
-def test_snapshot_job(request_type, transport: str = 'grpc'):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.snapshot_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = snapshots.Snapshot(
-            id='id_value',
-            project_id='project_id_value',
-            source_job_id='source_job_id_value',
-            state=snapshots.SnapshotState.PENDING,
-            description='description_value',
-            disk_size_bytes=1611,
-            region='region_value',
-        )
-        response = client.snapshot_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.SnapshotJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, snapshots.Snapshot)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.source_job_id == 'source_job_id_value'
-    assert response.state == snapshots.SnapshotState.PENDING
-    assert response.description == 'description_value'
-    assert response.disk_size_bytes == 1611
-    assert response.region == 'region_value'
-
-
-def test_snapshot_job_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.snapshot_job),
-            '__call__') as call:
-        client.snapshot_job()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.SnapshotJobRequest()
-
-
-@pytest.mark.asyncio
-async def test_snapshot_job_async(transport: str = 'grpc_asyncio', request_type=jobs.SnapshotJobRequest):
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.snapshot_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot(
-            id='id_value',
-            project_id='project_id_value',
-            source_job_id='source_job_id_value',
-            state=snapshots.SnapshotState.PENDING,
-            description='description_value',
-            disk_size_bytes=1611,
-            region='region_value',
-        ))
-        response = await client.snapshot_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == jobs.SnapshotJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, snapshots.Snapshot)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.source_job_id == 'source_job_id_value'
-    assert response.state == snapshots.SnapshotState.PENDING
-    assert response.description == 'description_value'
-    assert response.disk_size_bytes == 1611
-    assert response.region == 'region_value'
-
-
-@pytest.mark.asyncio
-async def test_snapshot_job_async_from_dict():
-    await test_snapshot_job_async(request_type=dict)
-
-
-def test_snapshot_job_field_headers():
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.SnapshotJobRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-    request.job_id = 'job_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.snapshot_job),
-            '__call__') as call:
-        call.return_value = snapshots.Snapshot()
-        client.snapshot_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&location=location_value&job_id=job_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_snapshot_job_field_headers_async():
-    client = JobsV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = jobs.SnapshotJobRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-    request.job_id = 'job_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
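-    # grpc_helpers_async.FakeUnaryUnaryCall wraps a plain response message in
-    # an awaitable call object, so the mocked stub below behaves like a real
-    # asynchronous gRPC call when awaited.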
- with mock.patch.object( - type(client.transport.snapshot_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot()) - await client.snapshot_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - jobs.CreateJobRequest, - dict, -]) -def test_create_job_rest(request_type): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2'} - request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 'user_agent': {'fields': {}}, 'version': 
{}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 'pipeline_description': {'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
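-        # The fake response below is round-tripped the way the REST transport
-        # expects: converted to its raw protobuf form with jobs.Job.pb(),
-        # serialized to JSON via json_format.MessageToJson(), and attached to
-        # the mocked requests.Response, which the client parses back into a
-        # jobs.Job.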
-        return_value = jobs.Job(
-            id='id_value',
-            project_id='project_id_value',
-            name='name_value',
-            type_=environment.JobType.JOB_TYPE_BATCH,
-            steps_location='steps_location_value',
-            current_state=jobs.JobState.JOB_STATE_STOPPED,
-            requested_state=jobs.JobState.JOB_STATE_STOPPED,
-            replace_job_id='replace_job_id_value',
-            client_request_id='client_request_id_value',
-            replaced_by_job_id='replaced_by_job_id_value',
-            temp_files=['temp_files_value'],
-            location='location_value',
-            created_from_snapshot_id='created_from_snapshot_id_value',
-            satisfies_pzs=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = jobs.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.create_job(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.Job)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.name == 'name_value'
-    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
-    assert response.steps_location == 'steps_location_value'
-    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.replace_job_id == 'replace_job_id_value'
-    assert response.client_request_id == 'client_request_id_value'
-    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
-    assert response.temp_files == ['temp_files_value']
-    assert response.location == 'location_value'
-    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
-    assert response.satisfies_pzs is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_job_rest_interceptors(null_interceptor):
-    transport = transports.JobsV1Beta3RestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
-        )
-    client = JobsV1Beta3Client(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_create_job") as post, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_create_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = jobs.CreateJobRequest.pb(jobs.CreateJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = jobs.Job.to_json(jobs.Job())
-
-        request = jobs.CreateJobRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = jobs.Job()
-
-        client.create_job(request, metadata=[("key", "val"), ("cephalopod", "squid")])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_create_job_rest_bad_request(transport: str = 'rest', request_type=jobs.CreateJobRequest):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1',
'location': 'sample2'} - request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 'user_agent': {'fields': {}}, 'version': {}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 'pipeline_description': 
{'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_job(request) - - -def test_create_job_rest_error(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.GetJobRequest, - dict, -]) -def test_get_job_rest(request_type): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
-        return_value = jobs.Job(
-            id='id_value',
-            project_id='project_id_value',
-            name='name_value',
-            type_=environment.JobType.JOB_TYPE_BATCH,
-            steps_location='steps_location_value',
-            current_state=jobs.JobState.JOB_STATE_STOPPED,
-            requested_state=jobs.JobState.JOB_STATE_STOPPED,
-            replace_job_id='replace_job_id_value',
-            client_request_id='client_request_id_value',
-            replaced_by_job_id='replaced_by_job_id_value',
-            temp_files=['temp_files_value'],
-            location='location_value',
-            created_from_snapshot_id='created_from_snapshot_id_value',
-            satisfies_pzs=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = jobs.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.get_job(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.Job)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.name == 'name_value'
-    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
-    assert response.steps_location == 'steps_location_value'
-    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.replace_job_id == 'replace_job_id_value'
-    assert response.client_request_id == 'client_request_id_value'
-    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
-    assert response.temp_files == ['temp_files_value']
-    assert response.location == 'location_value'
-    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
-    assert response.satisfies_pzs is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_job_rest_interceptors(null_interceptor):
-    transport = transports.JobsV1Beta3RestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
-        )
-    client = JobsV1Beta3Client(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_get_job") as post, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_get_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = jobs.GetJobRequest.pb(jobs.GetJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = jobs.Job.to_json(jobs.Job())
-
-        request = jobs.GetJobRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = jobs.Job()
-
-        client.get_job(request, metadata=[("key", "val"), ("cephalopod", "squid")])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_get_job_rest_bad_request(transport: str = 'rest', request_type=jobs.GetJobRequest):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2',
'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job(request) - - -def test_get_job_rest_error(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.UpdateJobRequest, - dict, -]) -def test_update_job_rest(request_type): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 
'user_agent': {'fields': {}}, 'version': {}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 'pipeline_description': {'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
-        return_value = jobs.Job(
-            id='id_value',
-            project_id='project_id_value',
-            name='name_value',
-            type_=environment.JobType.JOB_TYPE_BATCH,
-            steps_location='steps_location_value',
-            current_state=jobs.JobState.JOB_STATE_STOPPED,
-            requested_state=jobs.JobState.JOB_STATE_STOPPED,
-            replace_job_id='replace_job_id_value',
-            client_request_id='client_request_id_value',
-            replaced_by_job_id='replaced_by_job_id_value',
-            temp_files=['temp_files_value'],
-            location='location_value',
-            created_from_snapshot_id='created_from_snapshot_id_value',
-            satisfies_pzs=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = jobs.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.update_job(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.Job)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.name == 'name_value'
-    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
-    assert response.steps_location == 'steps_location_value'
-    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.replace_job_id == 'replace_job_id_value'
-    assert response.client_request_id == 'client_request_id_value'
-    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
-    assert response.temp_files == ['temp_files_value']
-    assert response.location == 'location_value'
-    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
-    assert response.satisfies_pzs is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_update_job_rest_interceptors(null_interceptor):
-    transport = transports.JobsV1Beta3RestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
-        )
-    client = JobsV1Beta3Client(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_update_job") as post, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_update_job") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = jobs.UpdateJobRequest.pb(jobs.UpdateJobRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = jobs.Job.to_json(jobs.Job())
-
-        request = jobs.UpdateJobRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = jobs.Job()
-
-        client.update_job(request, metadata=[("key", "val"), ("cephalopod", "squid")])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_update_job_rest_bad_request(transport: str = 'rest', request_type=jobs.UpdateJobRequest):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1',
'location': 'sample2', 'job_id': 'sample3'} - request_init["job"] = {'id': 'id_value', 'project_id': 'project_id_value', 'name': 'name_value', 'type_': 1, 'environment': {'temp_storage_prefix': 'temp_storage_prefix_value', 'cluster_manager_api_service': 'cluster_manager_api_service_value', 'experiments': ['experiments_value1', 'experiments_value2'], 'service_options': ['service_options_value1', 'service_options_value2'], 'service_kms_key_name': 'service_kms_key_name_value', 'worker_pools': [{'kind': 'kind_value', 'num_workers': 1212, 'packages': [{'name': 'name_value', 'location': 'location_value'}], 'default_package_set': 1, 'machine_type': 'machine_type_value', 'teardown_policy': 1, 'disk_size_gb': 1261, 'disk_type': 'disk_type_value', 'disk_source_image': 'disk_source_image_value', 'zone': 'zone_value', 'taskrunner_settings': {'task_user': 'task_user_value', 'task_group': 'task_group_value', 'oauth_scopes': ['oauth_scopes_value1', 'oauth_scopes_value2'], 'base_url': 'base_url_value', 'dataflow_api_version': 'dataflow_api_version_value', 'parallel_worker_settings': {'base_url': 'base_url_value', 'reporting_enabled': True, 'service_path': 'service_path_value', 'shuffle_service_path': 'shuffle_service_path_value', 'worker_id': 'worker_id_value', 'temp_storage_prefix': 'temp_storage_prefix_value'}, 'base_task_dir': 'base_task_dir_value', 'continue_on_exception': True, 'log_to_serialconsole': True, 'alsologtostderr': True, 'log_upload_location': 'log_upload_location_value', 'log_dir': 'log_dir_value', 'temp_storage_prefix': 'temp_storage_prefix_value', 'harness_command': 'harness_command_value', 'workflow_file_name': 'workflow_file_name_value', 'commandlines_file_name': 'commandlines_file_name_value', 'vm_id': 'vm_id_value', 'language_hint': 'language_hint_value', 'streaming_worker_main_class': 'streaming_worker_main_class_value'}, 'on_host_maintenance': 'on_host_maintenance_value', 'data_disks': [{'size_gb': 739, 'disk_type': 'disk_type_value', 'mount_point': 'mount_point_value'}], 'metadata': {}, 'autoscaling_settings': {'algorithm': 1, 'max_num_workers': 1633}, 'pool_args': {'type_url': 'type.googleapis.com/google.protobuf.Duration', 'value': b'\x08\x0c\x10\xdb\x07'}, 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'worker_harness_container_image': 'worker_harness_container_image_value', 'num_threads_per_worker': 2361, 'ip_configuration': 1, 'sdk_harness_container_images': [{'container_image': 'container_image_value', 'use_single_core_per_container': True, 'environment_id': 'environment_id_value', 'capabilities': ['capabilities_value1', 'capabilities_value2']}]}], 'user_agent': {'fields': {}}, 'version': {}, 'dataset': 'dataset_value', 'sdk_pipeline_options': {}, 'internal_experiments': {}, 'service_account_email': 'service_account_email_value', 'flex_resource_scheduling_goal': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'shuffle_mode': 1, 'debug_options': {'enable_hot_key_logging': True}}, 'steps': [{'kind': 'kind_value', 'name': 'name_value', 'properties': {}}], 'steps_location': 'steps_location_value', 'current_state': 1, 'current_state_time': {'seconds': 751, 'nanos': 543}, 'requested_state': 1, 'execution_info': {'stages': {}}, 'create_time': {}, 'replace_job_id': 'replace_job_id_value', 'transform_name_mapping': {}, 'client_request_id': 'client_request_id_value', 'replaced_by_job_id': 'replaced_by_job_id_value', 'temp_files': ['temp_files_value1', 'temp_files_value2'], 'labels': {}, 'location': 'location_value', 
'pipeline_description': {'original_pipeline_transform': [{'kind': 1, 'id': 'id_value', 'name': 'name_value', 'display_data': [{'key': 'key_value', 'namespace': 'namespace_value', 'str_value': 'str_value_value', 'int64_value': 1073, 'float_value': 0.117, 'java_class_value': 'java_class_value_value', 'timestamp_value': {}, 'duration_value': {'seconds': 751, 'nanos': 543}, 'bool_value': True, 'short_str_value': 'short_str_value_value', 'url': 'url_value', 'label': 'label_value'}], 'output_collection_name': ['output_collection_name_value1', 'output_collection_name_value2'], 'input_collection_name': ['input_collection_name_value1', 'input_collection_name_value2']}], 'execution_pipeline_stage': [{'name': 'name_value', 'id': 'id_value', 'kind': 1, 'input_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value', 'size_bytes': 1089}], 'output_source': {}, 'prerequisite_stage': ['prerequisite_stage_value1', 'prerequisite_stage_value2'], 'component_transform': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform': 'original_transform_value'}], 'component_source': [{'user_name': 'user_name_value', 'name': 'name_value', 'original_transform_or_collection': 'original_transform_or_collection_value'}]}], 'display_data': {}}, 'stage_states': [{'execution_stage_name': 'execution_stage_name_value', 'execution_stage_state': 1, 'current_state_time': {}}], 'job_metadata': {'sdk_version': {'version': 'version_value', 'version_display_name': 'version_display_name_value', 'sdk_support_status': 1}, 'spanner_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'database_id': 'database_id_value'}], 'bigquery_details': [{'table': 'table_value', 'dataset': 'dataset_value', 'project_id': 'project_id_value', 'query': 'query_value'}], 'big_table_details': [{'project_id': 'project_id_value', 'instance_id': 'instance_id_value', 'table_id': 'table_id_value'}], 'pubsub_details': [{'topic': 'topic_value', 'subscription': 'subscription_value'}], 'file_details': [{'file_pattern': 'file_pattern_value'}], 'datastore_details': [{'namespace': 'namespace_value', 'project_id': 'project_id_value'}]}, 'start_time': {}, 'created_from_snapshot_id': 'created_from_snapshot_id_value', 'satisfies_pzs': True} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_job(request) - - -def test_update_job_rest_error(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - jobs.ListJobsRequest, - dict, -]) -def test_list_jobs_rest(request_type): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
-        return_value = jobs.ListJobsResponse(
-            next_page_token='next_page_token_value',
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = jobs.ListJobsResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.list_jobs(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListJobsPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_jobs_rest_interceptors(null_interceptor):
-    transport = transports.JobsV1Beta3RestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(),
-        )
-    client = JobsV1Beta3Client(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-         mock.patch.object(path_template, "transcode") as transcode, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_list_jobs") as post, \
-         mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_list_jobs") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = jobs.ListJobsResponse.to_json(jobs.ListJobsResponse())
-
-        request = jobs.ListJobsRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = jobs.ListJobsResponse()
-
-        client.list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid")])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_list_jobs_rest_bad_request(transport: str = 'rest', request_type=jobs.ListJobsRequest):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.list_jobs(request)
-
-
-def test_list_jobs_rest_pager(transport: str = 'rest'):
-    client = JobsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1', 'location': 'sample2'} - - pager = client.list_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, jobs.Job) - for i in results) - - pages = list(client.list_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - jobs.ListJobsRequest, - dict, -]) -def test_aggregated_list_jobs_rest(request_type): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = jobs.ListJobsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = jobs.ListJobsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.aggregated_list_jobs(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AggregatedListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_aggregated_list_jobs_rest_interceptors(null_interceptor): - transport = transports.JobsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(), - ) - client = JobsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_aggregated_list_jobs") as post, \ - mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_aggregated_list_jobs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = jobs.ListJobsRequest.pb(jobs.ListJobsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = jobs.ListJobsResponse.to_json(jobs.ListJobsResponse()) - - request = jobs.ListJobsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = jobs.ListJobsResponse() - - client.aggregated_list_jobs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_aggregated_list_jobs_rest_bad_request(transport: str = 'rest', request_type=jobs.ListJobsRequest): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.aggregated_list_jobs(request) - - -def test_aggregated_list_jobs_rest_pager(transport: str = 'rest'): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - jobs.Job(), - ], - next_page_token='abc', - ), - jobs.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - ], - next_page_token='ghi', - ), - jobs.ListJobsResponse( - jobs=[ - jobs.Job(), - jobs.Job(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(jobs.ListJobsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1'} - - pager = client.aggregated_list_jobs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, jobs.Job) - for i in results) - - pages = list(client.aggregated_list_jobs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_check_active_jobs_rest_no_http_options(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = jobs.CheckActiveJobsRequest() - with pytest.raises(RuntimeError): - client.check_active_jobs(request) - - -@pytest.mark.parametrize("request_type", [ - jobs.SnapshotJobRequest, - dict, -]) -def test_snapshot_job_rest(request_type): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = snapshots.Snapshot( - id='id_value', - project_id='project_id_value', - source_job_id='source_job_id_value', - state=snapshots.SnapshotState.PENDING, - description='description_value', - disk_size_bytes=1611, - region='region_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = snapshots.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.snapshot_job(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, snapshots.Snapshot) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.source_job_id == 'source_job_id_value' - assert response.state == snapshots.SnapshotState.PENDING - assert response.description == 'description_value' - assert response.disk_size_bytes == 1611 - assert response.region == 'region_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_snapshot_job_rest_interceptors(null_interceptor): - transport = transports.JobsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.JobsV1Beta3RestInterceptor(), - ) - client = JobsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.JobsV1Beta3RestInterceptor, "post_snapshot_job") as post, \ - mock.patch.object(transports.JobsV1Beta3RestInterceptor, "pre_snapshot_job") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = jobs.SnapshotJobRequest.pb(jobs.SnapshotJobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot()) - - request = jobs.SnapshotJobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = snapshots.Snapshot() - - client.snapshot_job(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_snapshot_job_rest_bad_request(transport: str = 'rest', request_type=jobs.SnapshotJobRequest): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.snapshot_job(request) - - -def test_snapshot_job_rest_error(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_check_active_jobs_rest_error(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(RuntimeError) as runtime_error: - client.check_active_jobs({}) - assert ("Cannot define a method without a valid 'google.api.http' annotation." - in str(runtime_error.value)) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = JobsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = JobsV1Beta3Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.JobsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.JobsV1Beta3GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.JobsV1Beta3GrpcTransport, - transports.JobsV1Beta3GrpcAsyncIOTransport, - transports.JobsV1Beta3RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = JobsV1Beta3Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. 
- client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.JobsV1Beta3GrpcTransport, - ) - -def test_jobs_v1_beta3_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.JobsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_jobs_v1_beta3_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.JobsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_job', - 'get_job', - 'update_job', - 'list_jobs', - 'aggregated_list_jobs', - 'check_active_jobs', - 'snapshot_job', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_jobs_v1_beta3_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.JobsV1Beta3Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id="octopus", - ) - - -def test_jobs_v1_beta3_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.jobs_v1_beta3.transports.JobsV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.JobsV1Beta3Transport() - adc.assert_called_once() - - -def test_jobs_v1_beta3_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - JobsV1Beta3Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.JobsV1Beta3GrpcTransport, - transports.JobsV1Beta3GrpcAsyncIOTransport, - ], -) -def test_jobs_v1_beta3_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.JobsV1Beta3GrpcTransport, - transports.JobsV1Beta3GrpcAsyncIOTransport, - transports.JobsV1Beta3RestTransport, - ], -) -def test_jobs_v1_beta3_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.JobsV1Beta3GrpcTransport, grpc_helpers), - (transports.JobsV1Beta3GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_jobs_v1_beta3_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=["1", "2"], - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport]) -def test_jobs_v1_beta3_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_jobs_v1_beta3_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.JobsV1Beta3RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_jobs_v1_beta3_host_no_port(transport_name): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataflow.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_jobs_v1_beta3_host_with_port(transport_name): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataflow.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataflow.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_jobs_v1_beta3_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = JobsV1Beta3Client( - credentials=creds1, - transport=transport_name, - ) - client2 = JobsV1Beta3Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_job._session - session2 = client2.transport.create_job._session - assert session1 != session2 - session1 = client1.transport.get_job._session - session2 = client2.transport.get_job._session - assert session1 != session2 - session1 = client1.transport.update_job._session - session2 = client2.transport.update_job._session - assert session1 != session2 - session1 = client1.transport.list_jobs._session - session2 = client2.transport.list_jobs._session - assert session1 != session2 - session1 = client1.transport.aggregated_list_jobs._session - session2 = client2.transport.aggregated_list_jobs._session - assert session1 != session2 - session1 = client1.transport.check_active_jobs._session - session2 = client2.transport.check_active_jobs._session - assert session1 != session2 - session1 = client1.transport.snapshot_job._session - session2 = client2.transport.snapshot_job._session - assert session1 != session2 -def test_jobs_v1_beta3_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.JobsV1Beta3GrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_jobs_v1_beta3_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.JobsV1Beta3GrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport]) -def test_jobs_v1_beta3_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
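# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch, not part of the original generated
# file. It recaps what the two grpc transport-channel tests above verify: a
# caller-supplied channel is used verbatim and no TLS material is derived.
# The target address below is a placeholder assumption.
import grpc

from google.cloud.dataflow_v1beta3.services.jobs_v1_beta3 import transports

channel = grpc.insecure_channel("localhost:50051")  # placeholder target
transport = transports.JobsV1Beta3GrpcTransport(
    host="localhost:50051",
    channel=channel,
)
# The pre-built channel is reused as-is, exactly as the tests assert.
assert transport.grpc_channel is channel
# ---------------------------------------------------------------------------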
-@pytest.mark.parametrize("transport_class", [transports.JobsV1Beta3GrpcTransport, transports.JobsV1Beta3GrpcAsyncIOTransport]) -def test_jobs_v1_beta3_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = JobsV1Beta3Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = JobsV1Beta3Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = JobsV1Beta3Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = JobsV1Beta3Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = JobsV1Beta3Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = JobsV1Beta3Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = JobsV1Beta3Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = JobsV1Beta3Client.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = JobsV1Beta3Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = JobsV1Beta3Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = JobsV1Beta3Client.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = JobsV1Beta3Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = JobsV1Beta3Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = JobsV1Beta3Client.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = JobsV1Beta3Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.JobsV1Beta3Transport, '_prep_wrapped_messages') as prep: - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.JobsV1Beta3Transport, '_prep_wrapped_messages') as prep: - transport_class = JobsV1Beta3Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = JobsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = JobsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (JobsV1Beta3Client, transports.JobsV1Beta3GrpcTransport), - (JobsV1Beta3AsyncClient, transports.JobsV1Beta3GrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py deleted file mode 100644 index a03218e..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_messages_v1_beta3.py +++ /dev/null @@ -1,1713 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3Client -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import pagers -from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import transports -from google.cloud.dataflow_v1beta3.types import messages -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
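# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch, not part of the original generated
# file. The helper defined just below is patched over DEFAULT_ENDPOINT so the
# endpoint-switching tests never see a localhost default. The mTLS endpoint
# derivation those tests depend on (via the private _get_default_mtls_endpoint
# helper, exercised by test__get_default_mtls_endpoint) simply inserts ".mtls"
# before "googleapis.com":
from google.cloud.dataflow_v1beta3.services.messages_v1_beta3 import MessagesV1Beta3Client

assert (
    MessagesV1Beta3Client._get_default_mtls_endpoint("dataflow.googleapis.com")
    == "dataflow.mtls.googleapis.com"
)
# ---------------------------------------------------------------------------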
-def modify_default_endpoint(client):
-    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-
-def test__get_default_mtls_endpoint():
-    api_endpoint = "example.googleapis.com"
-    api_mtls_endpoint = "example.mtls.googleapis.com"
-    sandbox_endpoint = "example.sandbox.googleapis.com"
-    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
-    non_googleapi = "api.example.com"
-
-    assert MessagesV1Beta3Client._get_default_mtls_endpoint(None) is None
-    assert MessagesV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
-    assert MessagesV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
-    assert MessagesV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
-    assert MessagesV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
-    assert MessagesV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (MessagesV1Beta3Client, "grpc"),
-    (MessagesV1Beta3AsyncClient, "grpc_asyncio"),
-    (MessagesV1Beta3Client, "rest"),
-])
-def test_messages_v1_beta3_client_from_service_account_info(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
-        factory.return_value = creds
-        info = {"valid": True}
-        client = client_class.from_service_account_info(info, transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'dataflow.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://dataflow.googleapis.com'
-        )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
-    (transports.MessagesV1Beta3GrpcTransport, "grpc"),
-    (transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
-    (transports.MessagesV1Beta3RestTransport, "rest"),
-])
-def test_messages_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name):
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=True)
-        use_jwt.assert_called_once_with(True)
-
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=False)
-        use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (MessagesV1Beta3Client, "grpc"),
-    (MessagesV1Beta3AsyncClient, "grpc_asyncio"),
-    (MessagesV1Beta3Client, "rest"),
-])
-def test_messages_v1_beta3_client_from_service_account_file(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
-        factory.return_value = creds
-        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'dataflow.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://dataflow.googleapis.com'
-        )
-
-
-def test_messages_v1_beta3_client_get_transport_class():
-    transport = MessagesV1Beta3Client.get_transport_class()
-    available_transports = [
-        transports.MessagesV1Beta3GrpcTransport,
-        transports.MessagesV1Beta3RestTransport,
-    ]
-    assert transport in available_transports
-
-    transport = MessagesV1Beta3Client.get_transport_class("grpc")
-    assert transport == transports.MessagesV1Beta3GrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc"),
-    (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
-    (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"),
-])
-@mock.patch.object(MessagesV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3Client))
-@mock.patch.object(MessagesV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3AsyncClient))
-def test_messages_v1_beta3_client_client_options(client_class, transport_class, transport_name):
-    # Check that if channel is provided we won't create a new one.
-    with mock.patch.object(MessagesV1Beta3Client, 'get_transport_class') as gtc:
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-        client = client_class(transport=transport)
-        gtc.assert_not_called()
-
-    # Check that if channel is provided via str we will create a new one.
-    with mock.patch.object(MessagesV1Beta3Client, 'get_transport_class') as gtc:
-        client = client_class(transport=transport_name)
-        gtc.assert_called()
-
-    # Check the case api_endpoint is provided.
-    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", "true"), - (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", "false"), - (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", "true"), - (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", "false"), -]) -@mock.patch.object(MessagesV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3Client)) -@mock.patch.object(MessagesV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_messages_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MessagesV1Beta3Client, MessagesV1Beta3AsyncClient -]) -@mock.patch.object(MessagesV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3Client)) -@mock.patch.object(MessagesV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MessagesV1Beta3AsyncClient)) -def test_messages_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc"), - (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest"), -]) -def test_messages_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", grpc_helpers), - (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MessagesV1Beta3Client, transports.MessagesV1Beta3RestTransport, "rest", None), -]) -def test_messages_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_messages_v1_beta3_client_client_options_from_dict(): - with mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MessagesV1Beta3Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport, "grpc", grpc_helpers), - (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_messages_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=None, - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - messages.ListJobMessagesRequest, - dict, -]) -def test_list_job_messages(request_type, transport: str = 'grpc'): - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_messages), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = messages.ListJobMessagesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_messages(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == messages.ListJobMessagesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobMessagesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_messages_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_messages), - '__call__') as call: - client.list_job_messages() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == messages.ListJobMessagesRequest() - -@pytest.mark.asyncio -async def test_list_job_messages_async(transport: str = 'grpc_asyncio', request_type=messages.ListJobMessagesRequest): - client = MessagesV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_messages), - '__call__') as call: - # Designate an appropriate return value for the call. 
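-        # The async stub must hand back an awaitable, so the fake response
-        # is wrapped in grpc_helpers_async.FakeUnaryUnaryCall below.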
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(messages.ListJobMessagesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_job_messages(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == messages.ListJobMessagesRequest()
-
-        # Establish that the response is the type that we expect.
-        assert isinstance(response, pagers.ListJobMessagesAsyncPager)
-        assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_job_messages_async_from_dict():
-    await test_list_job_messages_async(request_type=dict)
-
-
-def test_list_job_messages_field_headers():
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = messages.ListJobMessagesRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-    request.job_id = 'job_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_messages),
-            '__call__') as call:
-        call.return_value = messages.ListJobMessagesResponse()
-        client.list_job_messages(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&location=location_value&job_id=job_id_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_job_messages_field_headers_async():
-    client = MessagesV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = messages.ListJobMessagesRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-    request.job_id = 'job_id_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_messages),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(messages.ListJobMessagesResponse())
-        await client.list_job_messages(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&location=location_value&job_id=job_id_value',
-    ) in kw['metadata']
-
-
-def test_list_job_messages_pager(transport_name: str = "grpc"):
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_messages),
-            '__call__') as call:
-        # Set the response to a series of pages.
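-        # The trailing RuntimeError below is a sentinel: iteration should
-        # stop at the page whose next_page_token is empty, so it is never
-        # actually raised.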
-        call.side_effect = (
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[],
-                next_page_token='def',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('project_id', ''),
-                ('location', ''),
-                ('job_id', ''),
-            )),
-        )
-        pager = client.list_job_messages(request={})
-
-        assert pager._metadata == metadata
-
-        results = list(pager)
-        assert len(results) == 6
-        assert all(isinstance(i, messages.JobMessage)
-                   for i in results)
-def test_list_job_messages_pages(transport_name: str = "grpc"):
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport_name,
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_messages),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[],
-                next_page_token='def',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_job_messages(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_job_messages_async_pager():
-    client = MessagesV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_messages),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                ],
-                next_page_token='abc',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[],
-                next_page_token='def',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                ],
-                next_page_token='ghi',
-            ),
-            messages.ListJobMessagesResponse(
-                job_messages=[
-                    messages.JobMessage(),
-                    messages.JobMessage(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_job_messages(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:  # pragma: no branch
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, messages.JobMessage)
-                   for i in responses)
-
-
-@pytest.mark.asyncio
-async def test_list_job_messages_async_pages():
-    client = MessagesV1Beta3AsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object( - type(client.transport.list_job_messages), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - messages.ListJobMessagesResponse( - job_messages=[ - messages.JobMessage(), - messages.JobMessage(), - messages.JobMessage(), - ], - next_page_token='abc', - ), - messages.ListJobMessagesResponse( - job_messages=[], - next_page_token='def', - ), - messages.ListJobMessagesResponse( - job_messages=[ - messages.JobMessage(), - ], - next_page_token='ghi', - ), - messages.ListJobMessagesResponse( - job_messages=[ - messages.JobMessage(), - messages.JobMessage(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_messages(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - messages.ListJobMessagesRequest, - dict, -]) -def test_list_job_messages_rest(request_type): - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = messages.ListJobMessagesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = messages.ListJobMessagesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_job_messages(request) - - # Establish that the response is the type that we expect. 
-    assert isinstance(response, pagers.ListJobMessagesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_list_job_messages_rest_interceptors(null_interceptor):
-    transport = transports.MessagesV1Beta3RestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.MessagesV1Beta3RestInterceptor(),
-        )
-    client = MessagesV1Beta3Client(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.MessagesV1Beta3RestInterceptor, "post_list_job_messages") as post, \
-        mock.patch.object(transports.MessagesV1Beta3RestInterceptor, "pre_list_job_messages") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = messages.ListJobMessagesRequest.pb(messages.ListJobMessagesRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = messages.ListJobMessagesResponse.to_json(messages.ListJobMessagesResponse())
-
-        request = messages.ListJobMessagesRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = messages.ListJobMessagesResponse()
-
-        client.list_job_messages(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_list_job_messages_rest_bad_request(transport: str = 'rest', request_type=messages.ListJobMessagesRequest):
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.list_job_messages(request)
-
-
-def test_list_job_messages_rest_pager(transport: str = 'rest'):
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(Session, 'request') as req:
-        # TODO(kbandes): remove this mock unless there's a good reason for it.
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - messages.ListJobMessagesResponse( - job_messages=[ - messages.JobMessage(), - messages.JobMessage(), - messages.JobMessage(), - ], - next_page_token='abc', - ), - messages.ListJobMessagesResponse( - job_messages=[], - next_page_token='def', - ), - messages.ListJobMessagesResponse( - job_messages=[ - messages.JobMessage(), - ], - next_page_token='ghi', - ), - messages.ListJobMessagesResponse( - job_messages=[ - messages.JobMessage(), - messages.JobMessage(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(messages.ListJobMessagesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - - pager = client.list_job_messages(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, messages.JobMessage) - for i in results) - - pages = list(client.list_job_messages(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MessagesV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MessagesV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MessagesV1Beta3Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MessagesV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MessagesV1Beta3Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MessagesV1Beta3Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MessagesV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MessagesV1Beta3Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.MessagesV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MessagesV1Beta3Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.MessagesV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MessagesV1Beta3GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MessagesV1Beta3GrpcTransport, - transports.MessagesV1Beta3GrpcAsyncIOTransport, - transports.MessagesV1Beta3RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = MessagesV1Beta3Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MessagesV1Beta3GrpcTransport, - ) - -def test_messages_v1_beta3_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MessagesV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_messages_v1_beta3_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.MessagesV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'list_job_messages', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_messages_v1_beta3_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MessagesV1Beta3Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id="octopus", - ) - - -def test_messages_v1_beta3_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.messages_v1_beta3.transports.MessagesV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MessagesV1Beta3Transport() - adc.assert_called_once() - - -def test_messages_v1_beta3_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MessagesV1Beta3Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MessagesV1Beta3GrpcTransport, - transports.MessagesV1Beta3GrpcAsyncIOTransport, - ], -) -def test_messages_v1_beta3_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MessagesV1Beta3GrpcTransport, - transports.MessagesV1Beta3GrpcAsyncIOTransport, - transports.MessagesV1Beta3RestTransport, - ], -) -def test_messages_v1_beta3_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MessagesV1Beta3GrpcTransport, grpc_helpers), - (transports.MessagesV1Beta3GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_messages_v1_beta3_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=["1", "2"], - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport]) -def test_messages_v1_beta3_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
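-    # (Explicitly supplied SSL channel credentials should pass through to
-    # create_channel unchanged; client_cert_source_for_mtls is only
-    # consulted in the second block below, when they are absent.)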
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_messages_v1_beta3_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.MessagesV1Beta3RestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_messages_v1_beta3_host_no_port(transport_name):
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataflow.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataflow.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_messages_v1_beta3_host_with_port(transport_name):
-    client = MessagesV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataflow.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataflow.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_messages_v1_beta3_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = MessagesV1Beta3Client(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = MessagesV1Beta3Client(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.list_job_messages._session
-    session2 = client2.transport.list_job_messages._session
-    assert session1 != session2
-def test_messages_v1_beta3_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.MessagesV1Beta3GrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_messages_v1_beta3_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.MessagesV1Beta3GrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport])
-def test_messages_v1_beta3_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MessagesV1Beta3GrpcTransport, transports.MessagesV1Beta3GrpcAsyncIOTransport]) -def test_messages_v1_beta3_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MessagesV1Beta3Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = MessagesV1Beta3Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MessagesV1Beta3Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = MessagesV1Beta3Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = MessagesV1Beta3Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MessagesV1Beta3Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MessagesV1Beta3Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = MessagesV1Beta3Client.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MessagesV1Beta3Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = MessagesV1Beta3Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = MessagesV1Beta3Client.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = MessagesV1Beta3Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MessagesV1Beta3Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = MessagesV1Beta3Client.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = MessagesV1Beta3Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MessagesV1Beta3Transport, '_prep_wrapped_messages') as prep: - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MessagesV1Beta3Transport, '_prep_wrapped_messages') as prep: - transport_class = MessagesV1Beta3Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = MessagesV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = MessagesV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MessagesV1Beta3Client, transports.MessagesV1Beta3GrpcTransport), - (MessagesV1Beta3AsyncClient, transports.MessagesV1Beta3GrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py deleted file mode 100644 index 60df478..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_metrics_v1_beta3.py +++ /dev/null @@ -1,2477 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#
-import os
-# try/except added for compatibility with python < 3.8
-try:
-    from unittest import mock
-    from unittest.mock import AsyncMock  # pragma: NO COVER
-except ImportError:  # pragma: NO COVER
-    import mock
-
-import grpc
-from grpc.experimental import aio
-from collections.abc import Iterable
-from google.protobuf import json_format
-import json
-import math
-import pytest
-from proto.marshal.rules.dates import DurationRule, TimestampRule
-from proto.marshal.rules import wrappers
-from requests import Response
-from requests import Request, PreparedRequest
-from requests.sessions import Session
-
-from google.api_core import client_options
-from google.api_core import exceptions as core_exceptions
-from google.api_core import gapic_v1
-from google.api_core import grpc_helpers
-from google.api_core import grpc_helpers_async
-from google.api_core import path_template
-from google.auth import credentials as ga_credentials
-from google.auth.exceptions import MutualTLSChannelError
-from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3AsyncClient
-from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import MetricsV1Beta3Client
-from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import pagers
-from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import transports
-from google.cloud.dataflow_v1beta3.types import metrics
-from google.oauth2 import service_account
-from google.protobuf import timestamp_pb2  # type: ignore
-import google.auth
-
-
-def client_cert_source_callback():
-    return b"cert bytes", b"key bytes"
-
-
-# If default endpoint is localhost, then default mtls endpoint will be the same.
-# This method modifies the default endpoint so the client can produce a different
-# mtls endpoint for endpoint testing purposes.
-def modify_default_endpoint(client):
-    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT
-
-
-def test__get_default_mtls_endpoint():
-    api_endpoint = "example.googleapis.com"
-    api_mtls_endpoint = "example.mtls.googleapis.com"
-    sandbox_endpoint = "example.sandbox.googleapis.com"
-    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
-    non_googleapi = "api.example.com"
-
-    assert MetricsV1Beta3Client._get_default_mtls_endpoint(None) is None
-    assert MetricsV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
-    assert MetricsV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
-    assert MetricsV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
-    assert MetricsV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
-    assert MetricsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (MetricsV1Beta3Client, "grpc"),
-    (MetricsV1Beta3AsyncClient, "grpc_asyncio"),
-    (MetricsV1Beta3Client, "rest"),
-])
-def test_metrics_v1_beta3_client_from_service_account_info(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
-        factory.return_value = creds
-        info = {"valid": True}
-        client = client_class.from_service_account_info(info, transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'dataflow.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://dataflow.googleapis.com'
-        )
-
-
-@pytest.mark.parametrize("transport_class,transport_name", [
-    (transports.MetricsV1Beta3GrpcTransport, "grpc"),
-    (transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
-    (transports.MetricsV1Beta3RestTransport, "rest"),
-])
-def test_metrics_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name):
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=True)
-        use_jwt.assert_called_once_with(True)
-
-    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
-        creds = service_account.Credentials(None, None, None)
-        transport = transport_class(credentials=creds, always_use_jwt_access=False)
-        use_jwt.assert_not_called()
-
-
-@pytest.mark.parametrize("client_class,transport_name", [
-    (MetricsV1Beta3Client, "grpc"),
-    (MetricsV1Beta3AsyncClient, "grpc_asyncio"),
-    (MetricsV1Beta3Client, "rest"),
-])
-def test_metrics_v1_beta3_client_from_service_account_file(client_class, transport_name):
-    creds = ga_credentials.AnonymousCredentials()
-    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
-        factory.return_value = creds
-        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
-        assert client.transport._credentials == creds
-        assert isinstance(client, client_class)
-
-        assert client.transport._host == (
-            'dataflow.googleapis.com:443'
-            if transport_name in ['grpc', 'grpc_asyncio']
-            else
-            'https://dataflow.googleapis.com'
-        )
-
-
-def test_metrics_v1_beta3_client_get_transport_class():
-    transport = MetricsV1Beta3Client.get_transport_class()
-    available_transports = [
-        transports.MetricsV1Beta3GrpcTransport,
-        transports.MetricsV1Beta3RestTransport,
-    ]
-    assert transport in available_transports
-
-    transport = MetricsV1Beta3Client.get_transport_class("grpc")
-    assert transport == transports.MetricsV1Beta3GrpcTransport
-
-
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [
-    (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc"),
-    (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"),
-    (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"),
-])
-@mock.patch.object(MetricsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3Client))
-@mock.patch.object(MetricsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3AsyncClient))
-def test_metrics_v1_beta3_client_client_options(client_class, transport_class, transport_name):
-    # Check that if channel is provided we won't create a new one.
-    with mock.patch.object(MetricsV1Beta3Client, 'get_transport_class') as gtc:
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials()
-        )
-        client = client_class(transport=transport)
-        gtc.assert_not_called()
-
-    # Check that if channel is provided via str we will create a new one.
-    with mock.patch.object(MetricsV1Beta3Client, 'get_transport_class') as gtc:
-        client = client_class(transport=transport_name)
-        gtc.assert_called()
-
-    # Check the case api_endpoint is provided.
-    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
-    with mock.patch.object(transport_class, '__init__') as patched:
-        patched.return_value = None
-        client = client_class(transport=transport_name, client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host="squid.clam.whelk",
-            scopes=None,
-            client_cert_source_for_mtls=None,
-            quota_project_id=None,
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-            always_use_jwt_access=True,
-            api_audience=None,
-        )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
-        with mock.patch.object(transport_class, '__init__') as patched:
-            patched.return_value = None
-            client = client_class(transport=transport_name)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
-    # "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", "true"), - (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", "false"), - (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "true"), - (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", "false"), -]) -@mock.patch.object(MetricsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3Client)) -@mock.patch.object(MetricsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_metrics_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - MetricsV1Beta3Client, MetricsV1Beta3AsyncClient -]) -@mock.patch.object(MetricsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3Client)) -@mock.patch.object(MetricsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsV1Beta3AsyncClient)) -def test_metrics_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc"), - (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest"), -]) -def test_metrics_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", grpc_helpers), - (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MetricsV1Beta3Client, transports.MetricsV1Beta3RestTransport, "rest", None), -]) -def test_metrics_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_metrics_v1_beta3_client_client_options_from_dict(): - with mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = MetricsV1Beta3Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport, "grpc", grpc_helpers), - (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_metrics_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
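# For illustration only (not from this patch): the same option a real caller
# would pass. "credentials.json" is a placeholder path to a service account
# key file, which the client loads instead of Application Default Credentials.
from google.api_core import client_options
from google.cloud import dataflow_v1beta3

client = dataflow_v1beta3.MetricsV1Beta3Client(
    client_options=client_options.ClientOptions(
        credentials_file="credentials.json",
    ),
)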
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=None, - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - metrics.GetJobMetricsRequest, - dict, -]) -def test_get_job_metrics(request_type, transport: str = 'grpc'): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metrics.JobMetrics( - ) - response = client.get_job_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetJobMetricsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, metrics.JobMetrics) - - -def test_get_job_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_metrics), - '__call__') as call: - client.get_job_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetJobMetricsRequest() - -@pytest.mark.asyncio -async def test_get_job_metrics_async(transport: str = 'grpc_asyncio', request_type=metrics.GetJobMetricsRequest): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobMetrics( - )) - response = await client.get_job_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetJobMetricsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, metrics.JobMetrics) - - -@pytest.mark.asyncio -async def test_get_job_metrics_async_from_dict(): - await test_get_job_metrics_async(request_type=dict) - - -def test_get_job_metrics_field_headers(): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metrics.GetJobMetricsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_metrics), - '__call__') as call: - call.return_value = metrics.JobMetrics() - client.get_job_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_metrics_field_headers_async(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metrics.GetJobMetricsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_metrics), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobMetrics()) - await client.get_job_metrics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - metrics.GetJobExecutionDetailsRequest, - dict, -]) -def test_get_job_execution_details(request_type, transport: str = 'grpc'): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - # Designate an appropriate return value for the call. 
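# For illustration only (not from this patch): the real call under test. The
# IDs are placeholders; the client builds the x-goog-request-params routing
# header asserted above from these request fields. Assumes Application
# Default Credentials.
from google.cloud import dataflow_v1beta3

client = dataflow_v1beta3.MetricsV1Beta3Client()
job_metrics = client.get_job_metrics(
    request=dataflow_v1beta3.GetJobMetricsRequest(
        project_id="my-project",
        location="us-central1",
        job_id="my-job-id",
    )
)
print(len(job_metrics.metrics))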
- call.return_value = metrics.JobExecutionDetails( - next_page_token='next_page_token_value', - ) - response = client.get_job_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetJobExecutionDetailsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.GetJobExecutionDetailsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_get_job_execution_details_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - client.get_job_execution_details() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetJobExecutionDetailsRequest() - -@pytest.mark.asyncio -async def test_get_job_execution_details_async(transport: str = 'grpc_asyncio', request_type=metrics.GetJobExecutionDetailsRequest): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobExecutionDetails( - next_page_token='next_page_token_value', - )) - response = await client.get_job_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetJobExecutionDetailsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.GetJobExecutionDetailsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_get_job_execution_details_async_from_dict(): - await test_get_job_execution_details_async(request_type=dict) - - -def test_get_job_execution_details_field_headers(): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metrics.GetJobExecutionDetailsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - call.return_value = metrics.JobExecutionDetails() - client.get_job_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_execution_details_field_headers_async(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metrics.GetJobExecutionDetailsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metrics.JobExecutionDetails()) - await client.get_job_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -def test_get_job_execution_details_pager(transport_name: str = "grpc"): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - metrics.StageSummary(), - ], - next_page_token='abc', - ), - metrics.JobExecutionDetails( - stages=[], - next_page_token='def', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - ], - next_page_token='ghi', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project_id', ''), - ('location', ''), - ('job_id', ''), - )), - ) - pager = client.get_job_execution_details(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metrics.StageSummary) - for i in results) -def test_get_job_execution_details_pages(transport_name: str = "grpc"): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__') as call: - # Set the response to a series of pages. 
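# For illustration only (not from this patch): consuming the pager under test.
# Iterating the returned GetJobExecutionDetailsPager fetches successive pages
# transparently and yields StageSummary items. Placeholder IDs; assumes
# Application Default Credentials.
from google.cloud import dataflow_v1beta3

client = dataflow_v1beta3.MetricsV1Beta3Client()
pager = client.get_job_execution_details(request={
    "project_id": "my-project",
    "location": "us-central1",
    "job_id": "my-job-id",
})
for stage_summary in pager:
    print(stage_summary.stage_id)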
- call.side_effect = ( - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - metrics.StageSummary(), - ], - next_page_token='abc', - ), - metrics.JobExecutionDetails( - stages=[], - next_page_token='def', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - ], - next_page_token='ghi', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - ], - ), - RuntimeError, - ) - pages = list(client.get_job_execution_details(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_get_job_execution_details_async_pager(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - metrics.StageSummary(), - ], - next_page_token='abc', - ), - metrics.JobExecutionDetails( - stages=[], - next_page_token='def', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - ], - next_page_token='ghi', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - ], - ), - RuntimeError, - ) - async_pager = await client.get_job_execution_details(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metrics.StageSummary) - for i in responses) - - -@pytest.mark.asyncio -async def test_get_job_execution_details_async_pages(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_execution_details), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - metrics.StageSummary(), - ], - next_page_token='abc', - ), - metrics.JobExecutionDetails( - stages=[], - next_page_token='def', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - ], - next_page_token='ghi', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.get_job_execution_details(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - metrics.GetStageExecutionDetailsRequest, - dict, -]) -def test_get_stage_execution_details(request_type, transport: str = 'grpc'): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
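# For illustration only (not from this patch): the async counterpart of the
# pager asserted above is consumed with `async for`. Placeholder IDs; assumes
# Application Default Credentials.
import asyncio

from google.cloud import dataflow_v1beta3

async def list_stage_summaries():
    client = dataflow_v1beta3.MetricsV1Beta3AsyncClient()
    pager = await client.get_job_execution_details(request={
        "project_id": "my-project",
        "location": "us-central1",
        "job_id": "my-job-id",
    })
    async for stage_summary in pager:
        print(stage_summary.stage_id)

asyncio.run(list_stage_summaries())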
- with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = metrics.StageExecutionDetails( - next_page_token='next_page_token_value', - ) - response = client.get_stage_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetStageExecutionDetailsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.GetStageExecutionDetailsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_get_stage_execution_details_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - client.get_stage_execution_details() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetStageExecutionDetailsRequest() - -@pytest.mark.asyncio -async def test_get_stage_execution_details_async(transport: str = 'grpc_asyncio', request_type=metrics.GetStageExecutionDetailsRequest): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(metrics.StageExecutionDetails( - next_page_token='next_page_token_value', - )) - response = await client.get_stage_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == metrics.GetStageExecutionDetailsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.GetStageExecutionDetailsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_get_stage_execution_details_async_from_dict(): - await test_get_stage_execution_details_async(request_type=dict) - - -def test_get_stage_execution_details_field_headers(): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metrics.GetStageExecutionDetailsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - request.stage_id = 'stage_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - call.return_value = metrics.StageExecutionDetails() - client.get_stage_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value&stage_id=stage_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_stage_execution_details_field_headers_async(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = metrics.GetStageExecutionDetailsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - request.stage_id = 'stage_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metrics.StageExecutionDetails()) - await client.get_stage_execution_details(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value&stage_id=stage_id_value', - ) in kw['metadata'] - - -def test_get_stage_execution_details_pager(transport_name: str = "grpc"): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - next_page_token='abc', - ), - metrics.StageExecutionDetails( - workers=[], - next_page_token='def', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - ], - next_page_token='ghi', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project_id', ''), - ('location', ''), - ('job_id', ''), - ('stage_id', ''), - )), - ) - pager = client.get_stage_execution_details(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metrics.WorkerDetails) - for i in results) -def test_get_stage_execution_details_pages(transport_name: str = "grpc"): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - next_page_token='abc', - ), - metrics.StageExecutionDetails( - workers=[], - next_page_token='def', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - ], - next_page_token='ghi', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - ), - RuntimeError, - ) - pages = list(client.get_stage_execution_details(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_get_stage_execution_details_async_pager(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - next_page_token='abc', - ), - metrics.StageExecutionDetails( - workers=[], - next_page_token='def', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - ], - next_page_token='ghi', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - ), - RuntimeError, - ) - async_pager = await client.get_stage_execution_details(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, metrics.WorkerDetails) - for i in responses) - - -@pytest.mark.asyncio -async def test_get_stage_execution_details_async_pages(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_stage_execution_details), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
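# For illustration only (not from this patch): the per-stage variant paged in
# these tests yields WorkerDetails items and takes the extra stage_id routing
# field. Placeholder IDs; assumes Application Default Credentials.
from google.cloud import dataflow_v1beta3

client = dataflow_v1beta3.MetricsV1Beta3Client()
for worker in client.get_stage_execution_details(request={
    "project_id": "my-project",
    "location": "us-central1",
    "job_id": "my-job-id",
    "stage_id": "my-stage-id",
}):
    print(worker.worker_name)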
- call.side_effect = ( - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - next_page_token='abc', - ), - metrics.StageExecutionDetails( - workers=[], - next_page_token='def', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - ], - next_page_token='ghi', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.get_stage_execution_details(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - metrics.GetJobMetricsRequest, - dict, -]) -def test_get_job_metrics_rest(request_type): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metrics.JobMetrics( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = metrics.JobMetrics.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_job_metrics(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, metrics.JobMetrics) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_metrics_rest_interceptors(null_interceptor): - transport = transports.MetricsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsV1Beta3RestInterceptor(), - ) - client = MetricsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "post_get_job_metrics") as post, \ - mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "pre_get_job_metrics") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = metrics.GetJobMetricsRequest.pb(metrics.GetJobMetricsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = metrics.JobMetrics.to_json(metrics.JobMetrics()) - - request = metrics.GetJobMetricsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metrics.JobMetrics() - - client.get_job_metrics(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_job_metrics_rest_bad_request(transport: str = 'rest', request_type=metrics.GetJobMetricsRequest): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job_metrics(request) - - -def test_get_job_metrics_rest_error(): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - metrics.GetJobExecutionDetailsRequest, - dict, -]) -def test_get_job_execution_details_rest(request_type): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
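# For illustration only (not from this patch): the hooks mocked in the
# interceptor test above can be supplied by subclassing the generated REST
# interceptor. Each pre_* hook must return (request, metadata) and each post_*
# hook must return the response; the print calls are placeholders. Assumes
# Application Default Credentials.
from google.cloud import dataflow_v1beta3
from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import transports

class LoggingInterceptor(transports.MetricsV1Beta3RestInterceptor):
    def pre_get_job_metrics(self, request, metadata):
        print("sending GetJobMetricsRequest")
        return request, metadata

    def post_get_job_metrics(self, response):
        print("received JobMetrics")
        return response

transport = transports.MetricsV1Beta3RestTransport(interceptor=LoggingInterceptor())
client = dataflow_v1beta3.MetricsV1Beta3Client(transport=transport)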
- return_value = metrics.JobExecutionDetails( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = metrics.JobExecutionDetails.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_job_execution_details(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.GetJobExecutionDetailsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_job_execution_details_rest_interceptors(null_interceptor): - transport = transports.MetricsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsV1Beta3RestInterceptor(), - ) - client = MetricsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "post_get_job_execution_details") as post, \ - mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "pre_get_job_execution_details") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = metrics.GetJobExecutionDetailsRequest.pb(metrics.GetJobExecutionDetailsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = metrics.JobExecutionDetails.to_json(metrics.JobExecutionDetails()) - - request = metrics.GetJobExecutionDetailsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metrics.JobExecutionDetails() - - client.get_job_execution_details(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_job_execution_details_rest_bad_request(transport: str = 'rest', request_type=metrics.GetJobExecutionDetailsRequest): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_job_execution_details(request) - - -def test_get_job_execution_details_rest_pager(transport: str = 'rest'): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - metrics.StageSummary(), - ], - next_page_token='abc', - ), - metrics.JobExecutionDetails( - stages=[], - next_page_token='def', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - ], - next_page_token='ghi', - ), - metrics.JobExecutionDetails( - stages=[ - metrics.StageSummary(), - metrics.StageSummary(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metrics.JobExecutionDetails.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - - pager = client.get_job_execution_details(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metrics.StageSummary) - for i in results) - - pages = list(client.get_job_execution_details(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - metrics.GetStageExecutionDetailsRequest, - dict, -]) -def test_get_stage_execution_details_rest(request_type): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3', 'stage_id': 'sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = metrics.StageExecutionDetails( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = metrics.StageExecutionDetails.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_stage_execution_details(request) - - # Establish that the response is the type that we expect. 
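# For illustration only (not from this patch): the faked 400 responses in the
# *_rest_bad_request tests surface to callers as a typed exception from
# google.api_core, so real code can catch it. Placeholder IDs; assumes
# Application Default Credentials.
from google.api_core import exceptions as core_exceptions
from google.cloud import dataflow_v1beta3

client = dataflow_v1beta3.MetricsV1Beta3Client(transport="rest")
try:
    client.get_job_metrics(request={
        "project_id": "my-project",
        "location": "us-central1",
        "job_id": "not-a-real-job",
    })
except core_exceptions.BadRequest as exc:
    print(f"server rejected the request: {exc}")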
- assert isinstance(response, pagers.GetStageExecutionDetailsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_stage_execution_details_rest_interceptors(null_interceptor): - transport = transports.MetricsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsV1Beta3RestInterceptor(), - ) - client = MetricsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "post_get_stage_execution_details") as post, \ - mock.patch.object(transports.MetricsV1Beta3RestInterceptor, "pre_get_stage_execution_details") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = metrics.GetStageExecutionDetailsRequest.pb(metrics.GetStageExecutionDetailsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = metrics.StageExecutionDetails.to_json(metrics.StageExecutionDetails()) - - request = metrics.GetStageExecutionDetailsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = metrics.StageExecutionDetails() - - client.get_stage_execution_details(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_stage_execution_details_rest_bad_request(transport: str = 'rest', request_type=metrics.GetStageExecutionDetailsRequest): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3', 'stage_id': 'sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_stage_execution_details(request) - - -def test_get_stage_execution_details_rest_pager(transport: str = 'rest'): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - next_page_token='abc', - ), - metrics.StageExecutionDetails( - workers=[], - next_page_token='def', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - ], - next_page_token='ghi', - ), - metrics.StageExecutionDetails( - workers=[ - metrics.WorkerDetails(), - metrics.WorkerDetails(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(metrics.StageExecutionDetails.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3', 'stage_id': 'sample4'} - - pager = client.get_stage_execution_details(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, metrics.WorkerDetails) - for i in results) - - pages = list(client.get_stage_execution_details(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.MetricsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.MetricsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsV1Beta3Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.MetricsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetricsV1Beta3Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = MetricsV1Beta3Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.MetricsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = MetricsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
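# For illustration only (not from this patch): the one supported way to pair
# explicit credentials with a custom transport, per the ValueError checks
# above — put the credentials on the transport, not on the client. Assumes
# Application Default Credentials.
import google.auth

from google.cloud import dataflow_v1beta3
from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import transports

creds, _ = google.auth.default()
transport = transports.MetricsV1Beta3GrpcTransport(credentials=creds)
client = dataflow_v1beta3.MetricsV1Beta3Client(transport=transport)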
- transport = transports.MetricsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = MetricsV1Beta3Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.MetricsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.MetricsV1Beta3GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.MetricsV1Beta3GrpcTransport, - transports.MetricsV1Beta3GrpcAsyncIOTransport, - transports.MetricsV1Beta3RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = MetricsV1Beta3Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.MetricsV1Beta3GrpcTransport, - ) - -def test_metrics_v1_beta3_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.MetricsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_metrics_v1_beta3_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.MetricsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'get_job_metrics', - 'get_job_execution_details', - 'get_stage_execution_details', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_metrics_v1_beta3_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsV1Beta3Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id="octopus", - ) - - -def test_metrics_v1_beta3_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.metrics_v1_beta3.transports.MetricsV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsV1Beta3Transport() - adc.assert_called_once() - - -def test_metrics_v1_beta3_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricsV1Beta3Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricsV1Beta3GrpcTransport, - transports.MetricsV1Beta3GrpcAsyncIOTransport, - ], -) -def test_metrics_v1_beta3_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
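# For illustration only (not from this patch): the ADC lookup these tests
# mock. Caller-supplied scopes are forwarded as `scopes`, while the service's
# own OAuth scopes travel separately as `default_scopes`. Assumes Application
# Default Credentials are configured in the environment.
import google.auth

credentials, project = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"],
)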
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricsV1Beta3GrpcTransport, - transports.MetricsV1Beta3GrpcAsyncIOTransport, - transports.MetricsV1Beta3RestTransport, - ], -) -def test_metrics_v1_beta3_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.MetricsV1Beta3GrpcTransport, grpc_helpers), - (transports.MetricsV1Beta3GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_metrics_v1_beta3_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=["1", "2"], - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport]) -def test_metrics_v1_beta3_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. 
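# For illustration only (not from this patch): the precedence checked below —
# an explicit ssl_channel_credentials object wins; otherwise
# client_cert_source_for_mtls is used to build one. Assumes Application
# Default Credentials; ssl_channel_credentials() with no arguments uses the
# system root certificates.
import grpc

from google.cloud.dataflow_v1beta3.services.metrics_v1_beta3 import transports

transport = transports.MetricsV1Beta3GrpcTransport(
    host="dataflow.googleapis.com",
    ssl_channel_credentials=grpc.ssl_channel_credentials(),
)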
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_metrics_v1_beta3_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.MetricsV1Beta3RestTransport (
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_metrics_v1_beta3_host_no_port(transport_name):
-    client = MetricsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataflow.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataflow.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_metrics_v1_beta3_host_with_port(transport_name):
-    client = MetricsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataflow.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataflow.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_metrics_v1_beta3_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = MetricsV1Beta3Client(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = MetricsV1Beta3Client(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.get_job_metrics._session
-    session2 = client2.transport.get_job_metrics._session
-    assert session1 != session2
-    session1 = client1.transport.get_job_execution_details._session
-    session2 = client2.transport.get_job_execution_details._session
-    assert session1 != session2
-    session1 = client1.transport.get_stage_execution_details._session
-    session2 = client2.transport.get_stage_execution_details._session
-    assert session1 != session2
-def test_metrics_v1_beta3_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.MetricsV1Beta3GrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
-
-
-def test_metrics_v1_beta3_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.MetricsV1Beta3GrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials == None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport])
-def test_metrics_v1_beta3_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.MetricsV1Beta3GrpcTransport, transports.MetricsV1Beta3GrpcAsyncIOTransport]) -def test_metrics_v1_beta3_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = MetricsV1Beta3Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = MetricsV1Beta3Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsV1Beta3Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = MetricsV1Beta3Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = MetricsV1Beta3Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsV1Beta3Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = MetricsV1Beta3Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = MetricsV1Beta3Client.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsV1Beta3Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = MetricsV1Beta3Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = MetricsV1Beta3Client.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = MetricsV1Beta3Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = MetricsV1Beta3Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = MetricsV1Beta3Client.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = MetricsV1Beta3Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.MetricsV1Beta3Transport, '_prep_wrapped_messages') as prep: - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.MetricsV1Beta3Transport, '_prep_wrapped_messages') as prep: - transport_class = MetricsV1Beta3Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = MetricsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = MetricsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (MetricsV1Beta3Client, transports.MetricsV1Beta3GrpcTransport), - (MetricsV1Beta3AsyncClient, transports.MetricsV1Beta3GrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py deleted file mode 100644 index 785bf64..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_snapshots_v1_beta3.py +++ /dev/null @@ -1,2013 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import SnapshotsV1Beta3AsyncClient -from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import SnapshotsV1Beta3Client -from google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3 import transports -from google.cloud.dataflow_v1beta3.types import snapshots -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(None) is None - assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SnapshotsV1Beta3Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SnapshotsV1Beta3Client, "grpc"), - (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), - (SnapshotsV1Beta3Client, "rest"), -]) -def test_snapshots_v1_beta3_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SnapshotsV1Beta3GrpcTransport, "grpc"), - (transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.SnapshotsV1Beta3RestTransport, "rest"), -]) -def test_snapshots_v1_beta3_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SnapshotsV1Beta3Client, "grpc"), - (SnapshotsV1Beta3AsyncClient, "grpc_asyncio"), - (SnapshotsV1Beta3Client, "rest"), -]) -def test_snapshots_v1_beta3_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, 
client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -def test_snapshots_v1_beta3_client_get_transport_class(): - transport = SnapshotsV1Beta3Client.get_transport_class() - available_transports = [ - transports.SnapshotsV1Beta3GrpcTransport, - transports.SnapshotsV1Beta3RestTransport, - ] - assert transport in available_transports - - transport = SnapshotsV1Beta3Client.get_transport_class("grpc") - assert transport == transports.SnapshotsV1Beta3GrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc"), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), -]) -@mock.patch.object(SnapshotsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3Client)) -@mock.patch.object(SnapshotsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3AsyncClient)) -def test_snapshots_v1_beta3_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(SnapshotsV1Beta3Client, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SnapshotsV1Beta3Client, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", "true"), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "true"), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", "false"), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest", "true"), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest", "false"), -]) -@mock.patch.object(SnapshotsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3Client)) -@mock.patch.object(SnapshotsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3AsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_snapshots_v1_beta3_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - SnapshotsV1Beta3Client, SnapshotsV1Beta3AsyncClient -]) -@mock.patch.object(SnapshotsV1Beta3Client, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3Client)) -@mock.patch.object(SnapshotsV1Beta3AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SnapshotsV1Beta3AsyncClient)) -def test_snapshots_v1_beta3_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc"), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio"), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest"), -]) -def test_snapshots_v1_beta3_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", grpc_helpers), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3RestTransport, "rest", None), -]) -def test_snapshots_v1_beta3_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_snapshots_v1_beta3_client_client_options_from_dict(): - with mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SnapshotsV1Beta3Client( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport, "grpc", grpc_helpers), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_snapshots_v1_beta3_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=None, - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - snapshots.GetSnapshotRequest, - dict, -]) -def test_get_snapshot(request_type, transport: str = 'grpc'): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = snapshots.Snapshot( - id='id_value', - project_id='project_id_value', - source_job_id='source_job_id_value', - state=snapshots.SnapshotState.PENDING, - description='description_value', - disk_size_bytes=1611, - region='region_value', - ) - response = client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.GetSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, snapshots.Snapshot) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.source_job_id == 'source_job_id_value' - assert response.state == snapshots.SnapshotState.PENDING - assert response.description == 'description_value' - assert response.disk_size_bytes == 1611 - assert response.region == 'region_value' - - -def test_get_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - client.get_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.GetSnapshotRequest() - -@pytest.mark.asyncio -async def test_get_snapshot_async(transport: str = 'grpc_asyncio', request_type=snapshots.GetSnapshotRequest): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot( - id='id_value', - project_id='project_id_value', - source_job_id='source_job_id_value', - state=snapshots.SnapshotState.PENDING, - description='description_value', - disk_size_bytes=1611, - region='region_value', - )) - response = await client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.GetSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, snapshots.Snapshot) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.source_job_id == 'source_job_id_value' - assert response.state == snapshots.SnapshotState.PENDING - assert response.description == 'description_value' - assert response.disk_size_bytes == 1611 - assert response.region == 'region_value' - - -@pytest.mark.asyncio -async def test_get_snapshot_async_from_dict(): - await test_get_snapshot_async(request_type=dict) - - -def test_get_snapshot_field_headers(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = snapshots.GetSnapshotRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.snapshot_id = 'snapshot_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - call.return_value = snapshots.Snapshot() - client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_snapshot_field_headers_async(): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = snapshots.GetSnapshotRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.snapshot_id = 'snapshot_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.Snapshot()) - await client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - snapshots.DeleteSnapshotRequest, - dict, -]) -def test_delete_snapshot(request_type, transport: str = 'grpc'): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = snapshots.DeleteSnapshotResponse( - ) - response = client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.DeleteSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, snapshots.DeleteSnapshotResponse) - - -def test_delete_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - client.delete_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.DeleteSnapshotRequest() - -@pytest.mark.asyncio -async def test_delete_snapshot_async(transport: str = 'grpc_asyncio', request_type=snapshots.DeleteSnapshotRequest): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(snapshots.DeleteSnapshotResponse( - )) - response = await client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.DeleteSnapshotRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, snapshots.DeleteSnapshotResponse) - - -@pytest.mark.asyncio -async def test_delete_snapshot_async_from_dict(): - await test_delete_snapshot_async(request_type=dict) - - -def test_delete_snapshot_field_headers(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = snapshots.DeleteSnapshotRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.snapshot_id = 'snapshot_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - call.return_value = snapshots.DeleteSnapshotResponse() - client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_snapshot_field_headers_async(): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = snapshots.DeleteSnapshotRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.snapshot_id = 'snapshot_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.DeleteSnapshotResponse()) - await client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&snapshot_id=snapshot_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - snapshots.ListSnapshotsRequest, - dict, -]) -def test_list_snapshots(request_type, transport: str = 'grpc'): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = snapshots.ListSnapshotsResponse( - ) - response = client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.ListSnapshotsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, snapshots.ListSnapshotsResponse) - - -def test_list_snapshots_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - client.list_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.ListSnapshotsRequest() - -@pytest.mark.asyncio -async def test_list_snapshots_async(transport: str = 'grpc_asyncio', request_type=snapshots.ListSnapshotsRequest): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(snapshots.ListSnapshotsResponse( - )) - response = await client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == snapshots.ListSnapshotsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, snapshots.ListSnapshotsResponse) - - -@pytest.mark.asyncio -async def test_list_snapshots_async_from_dict(): - await test_list_snapshots_async(request_type=dict) - - -def test_list_snapshots_field_headers(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = snapshots.ListSnapshotsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - call.return_value = snapshots.ListSnapshotsResponse() - client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_snapshots_field_headers_async(): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = snapshots.ListSnapshotsRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - request.job_id = 'job_id_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(snapshots.ListSnapshotsResponse()) - await client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value&job_id=job_id_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - snapshots.GetSnapshotRequest, - dict, -]) -def test_get_snapshot_rest(request_type): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = snapshots.Snapshot( - id='id_value', - project_id='project_id_value', - source_job_id='source_job_id_value', - state=snapshots.SnapshotState.PENDING, - description='description_value', - disk_size_bytes=1611, - region='region_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = snapshots.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_snapshot(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, snapshots.Snapshot) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.source_job_id == 'source_job_id_value' - assert response.state == snapshots.SnapshotState.PENDING - assert response.description == 'description_value' - assert response.disk_size_bytes == 1611 - assert response.region == 'region_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_snapshot_rest_interceptors(null_interceptor): - transport = transports.SnapshotsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SnapshotsV1Beta3RestInterceptor(), - ) - client = SnapshotsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "post_get_snapshot") as post, \ - mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "pre_get_snapshot") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = snapshots.GetSnapshotRequest.pb(snapshots.GetSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = snapshots.Snapshot.to_json(snapshots.Snapshot()) - - request = snapshots.GetSnapshotRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = snapshots.Snapshot() - - client.get_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_snapshot_rest_bad_request(transport: str = 'rest', request_type=snapshots.GetSnapshotRequest): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_snapshot(request) - - -def test_get_snapshot_rest_error(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - snapshots.DeleteSnapshotRequest, - dict, -]) -def test_delete_snapshot_rest(request_type): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = snapshots.DeleteSnapshotResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = snapshots.DeleteSnapshotResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_snapshot(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, snapshots.DeleteSnapshotResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_snapshot_rest_interceptors(null_interceptor): - transport = transports.SnapshotsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SnapshotsV1Beta3RestInterceptor(), - ) - client = SnapshotsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "post_delete_snapshot") as post, \ - mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "pre_delete_snapshot") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = snapshots.DeleteSnapshotRequest.pb(snapshots.DeleteSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = snapshots.DeleteSnapshotResponse.to_json(snapshots.DeleteSnapshotResponse()) - - request = snapshots.DeleteSnapshotRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = snapshots.DeleteSnapshotResponse() - - client.delete_snapshot(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_snapshot_rest_bad_request(transport: str = 'rest', request_type=snapshots.DeleteSnapshotRequest): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'snapshot_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_snapshot(request) - - -def test_delete_snapshot_rest_error(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - snapshots.ListSnapshotsRequest, - dict, -]) -def test_list_snapshots_rest(request_type): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = snapshots.ListSnapshotsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = snapshots.ListSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_snapshots(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, snapshots.ListSnapshotsResponse) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_snapshots_rest_interceptors(null_interceptor): - transport = transports.SnapshotsV1Beta3RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.SnapshotsV1Beta3RestInterceptor(), - ) - client = SnapshotsV1Beta3Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "post_list_snapshots") as post, \ - mock.patch.object(transports.SnapshotsV1Beta3RestInterceptor, "pre_list_snapshots") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = snapshots.ListSnapshotsRequest.pb(snapshots.ListSnapshotsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = snapshots.ListSnapshotsResponse.to_json(snapshots.ListSnapshotsResponse()) - - request = snapshots.ListSnapshotsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = snapshots.ListSnapshotsResponse() - - client.list_snapshots(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_snapshots_rest_bad_request(transport: str = 'rest', request_type=snapshots.ListSnapshotsRequest): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 
'sample1', 'location': 'sample2', 'job_id': 'sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_snapshots(request) - - -def test_list_snapshots_rest_error(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SnapshotsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.SnapshotsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SnapshotsV1Beta3Client( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.SnapshotsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SnapshotsV1Beta3Client( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SnapshotsV1Beta3Client( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.SnapshotsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SnapshotsV1Beta3Client( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SnapshotsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = SnapshotsV1Beta3Client(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SnapshotsV1Beta3GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.SnapshotsV1Beta3GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.SnapshotsV1Beta3GrpcTransport, - transports.SnapshotsV1Beta3GrpcAsyncIOTransport, - transports.SnapshotsV1Beta3RestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
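# A minimal sketch (not part of this change) of the constructor combinations
# that test_credentials_transport_error above accepts and rejects:
#
#     # OK: configure credentials on the transport, then hand the transport over.
#     transport = transports.SnapshotsV1Beta3GrpcTransport(
#         credentials=ga_credentials.AnonymousCredentials(),
#     )
#     client = SnapshotsV1Beta3Client(transport=transport)
#
#     # OK: let the client build its own transport from the credentials.
#     client = SnapshotsV1Beta3Client(credentials=ga_credentials.AnonymousCredentials())
#
#     # ValueError: credentials, credentials_file, api_key, or scopes cannot be
#     # combined with an already-constructed transport instance.
#     SnapshotsV1Beta3Client(
#         credentials=ga_credentials.AnonymousCredentials(),
#         transport=transport,
#     )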
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = SnapshotsV1Beta3Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SnapshotsV1Beta3GrpcTransport, - ) - -def test_snapshots_v1_beta3_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SnapshotsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_snapshots_v1_beta3_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.SnapshotsV1Beta3Transport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'get_snapshot', - 'delete_snapshot', - 'list_snapshots', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_snapshots_v1_beta3_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SnapshotsV1Beta3Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id="octopus", - ) - - -def test_snapshots_v1_beta3_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
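# The credentials-file path asserted above flows through
# google.auth.load_credentials_from_file and, with no explicit scopes, falls
# back to the four Dataflow default scopes listed in the assertion. A minimal
# caller-side sketch (not part of this change; the file path is hypothetical):
#
#     client = SnapshotsV1Beta3Client(
#         client_options=client_options.ClientOptions(
#             credentials_file="credentials.json",
#         ),
#     )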
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.snapshots_v1_beta3.transports.SnapshotsV1Beta3Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SnapshotsV1Beta3Transport() - adc.assert_called_once() - - -def test_snapshots_v1_beta3_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SnapshotsV1Beta3Client() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SnapshotsV1Beta3GrpcTransport, - transports.SnapshotsV1Beta3GrpcAsyncIOTransport, - ], -) -def test_snapshots_v1_beta3_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SnapshotsV1Beta3GrpcTransport, - transports.SnapshotsV1Beta3GrpcAsyncIOTransport, - transports.SnapshotsV1Beta3RestTransport, - ], -) -def test_snapshots_v1_beta3_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SnapshotsV1Beta3GrpcTransport, grpc_helpers), - (transports.SnapshotsV1Beta3GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_snapshots_v1_beta3_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
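# Application Default Credentials, as asserted above: constructing a client
# with no explicit credentials defers to google.auth.default() with the same
# default scopes. A caller-side sketch (not part of this change):
#
#     import google.auth
#     credentials, _ = google.auth.default(
#         scopes=["https://www.googleapis.com/auth/cloud-platform"],
#     )
#     client = SnapshotsV1Beta3Client(credentials=credentials)
#     # or simply SnapshotsV1Beta3Client() to let the client resolve ADC itself.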
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=["1", "2"], - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport]) -def test_snapshots_v1_beta3_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
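# The channel options expected above lift gRPC's default message-size limits
# (-1 means unlimited). A caller supplying its own channel can do the same
# (a sketch, not part of this change):
#
#     channel = grpc.secure_channel(
#         "dataflow.googleapis.com:443",
#         grpc.ssl_channel_credentials(),
#         options=[
#             ("grpc.max_send_message_length", -1),
#             ("grpc.max_receive_message_length", -1),
#         ],
#     )
#     transport = transports.SnapshotsV1Beta3GrpcTransport(channel=channel)
#     client = SnapshotsV1Beta3Client(transport=transport)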
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-def test_snapshots_v1_beta3_http_transport_client_cert_source_for_mtls():
-    cred = ga_credentials.AnonymousCredentials()
-    with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel:
-        transports.SnapshotsV1Beta3RestTransport(
-            credentials=cred,
-            client_cert_source_for_mtls=client_cert_source_callback
-        )
-        mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_snapshots_v1_beta3_host_no_port(transport_name):
-    client = SnapshotsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataflow.googleapis.com:443'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataflow.googleapis.com'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "grpc",
-    "grpc_asyncio",
-    "rest",
-])
-def test_snapshots_v1_beta3_host_with_port(transport_name):
-    client = SnapshotsV1Beta3Client(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'),
-        transport=transport_name,
-    )
-    assert client.transport._host == (
-        'dataflow.googleapis.com:8000'
-        if transport_name in ['grpc', 'grpc_asyncio']
-        else 'https://dataflow.googleapis.com:8000'
-    )
-
-@pytest.mark.parametrize("transport_name", [
-    "rest",
-])
-def test_snapshots_v1_beta3_client_transport_session_collision(transport_name):
-    creds1 = ga_credentials.AnonymousCredentials()
-    creds2 = ga_credentials.AnonymousCredentials()
-    client1 = SnapshotsV1Beta3Client(
-        credentials=creds1,
-        transport=transport_name,
-    )
-    client2 = SnapshotsV1Beta3Client(
-        credentials=creds2,
-        transport=transport_name,
-    )
-    session1 = client1.transport.get_snapshot._session
-    session2 = client2.transport.get_snapshot._session
-    assert session1 != session2
-    session1 = client1.transport.delete_snapshot._session
-    session2 = client2.transport.delete_snapshot._session
-    assert session1 != session2
-    session1 = client1.transport.list_snapshots._session
-    session2 = client2.transport.list_snapshots._session
-    assert session1 != session2
-
-
-def test_snapshots_v1_beta3_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.SnapshotsV1Beta3GrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_snapshots_v1_beta3_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
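# The two host tests above pin down the endpoint-override rule: gRPC transports
# get ":443" appended when no port is given, while the REST transport is
# prefixed with "https://". Caller-side sketch (not part of this change):
#
#     client = SnapshotsV1Beta3Client(
#         credentials=ga_credentials.AnonymousCredentials(),
#         client_options=client_options.ClientOptions(
#             api_endpoint="dataflow.googleapis.com:8000",  # e.g. a local proxy
#         ),
#         transport="grpc",
#     )
#     assert client.transport._host == "dataflow.googleapis.com:8000"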
-    # Check that channel is used if provided.
-    transport = transports.SnapshotsV1Beta3GrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport])
-def test_snapshots_v1_beta3_transport_channel_mtls_with_client_cert_source(
-    transport_class
-):
-    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
-        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
-            mock_ssl_cred = mock.Mock()
-            grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-            mock_grpc_channel = mock.Mock()
-            grpc_create_channel.return_value = mock_grpc_channel
-
-            cred = ga_credentials.AnonymousCredentials()
-            with pytest.warns(DeprecationWarning):
-                with mock.patch.object(google.auth, 'default') as adc:
-                    adc.return_value = (cred, None)
-                    transport = transport_class(
-                        host="squid.clam.whelk",
-                        api_mtls_endpoint="mtls.squid.clam.whelk",
-                        client_cert_source=client_cert_source_callback,
-                    )
-                    adc.assert_called_once()
-
-            grpc_ssl_channel_cred.assert_called_once_with(
-                certificate_chain=b"cert bytes", private_key=b"key bytes"
-            )
-            grpc_create_channel.assert_called_once_with(
-                "mtls.squid.clam.whelk:443",
-                credentials=cred,
-                credentials_file=None,
-                scopes=None,
-                ssl_credentials=mock_ssl_cred,
-                quota_project_id=None,
-                options=[
-                    ("grpc.max_send_message_length", -1),
-                    ("grpc.max_receive_message_length", -1),
-                ],
-            )
-            assert transport.grpc_channel == mock_grpc_channel
-            assert transport._ssl_channel_credentials == mock_ssl_cred
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
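# Client-side mTLS, as exercised by the cert-source tests above: the transport
# accepts a callable returning a (certificate PEM bytes, key PEM bytes) pair
# and builds the SSL channel credentials from it. A sketch (not part of this
# change; the callback is hypothetical):
#
#     def my_cert_source():
#         return b"cert bytes", b"key bytes"
#
#     transport = transports.SnapshotsV1Beta3GrpcTransport(
#         credentials=ga_credentials.AnonymousCredentials(),
#         client_cert_source_for_mtls=my_cert_source,
#     )
#     client = SnapshotsV1Beta3Client(transport=transport)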
-@pytest.mark.parametrize("transport_class", [transports.SnapshotsV1Beta3GrpcTransport, transports.SnapshotsV1Beta3GrpcAsyncIOTransport]) -def test_snapshots_v1_beta3_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SnapshotsV1Beta3Client.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = SnapshotsV1Beta3Client.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = SnapshotsV1Beta3Client.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = SnapshotsV1Beta3Client.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = SnapshotsV1Beta3Client.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = SnapshotsV1Beta3Client.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SnapshotsV1Beta3Client.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = SnapshotsV1Beta3Client.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = SnapshotsV1Beta3Client.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = SnapshotsV1Beta3Client.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = SnapshotsV1Beta3Client.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = SnapshotsV1Beta3Client.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SnapshotsV1Beta3Client.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = SnapshotsV1Beta3Client.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = SnapshotsV1Beta3Client.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SnapshotsV1Beta3Transport, '_prep_wrapped_messages') as prep: - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SnapshotsV1Beta3Transport, '_prep_wrapped_messages') as prep: - transport_class = SnapshotsV1Beta3Client.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SnapshotsV1Beta3AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = SnapshotsV1Beta3Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
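# The path helpers round-tripped above are plain string templates, e.g.
# (a sketch, not part of this change):
#
#     path = SnapshotsV1Beta3Client.common_location_path("my-project", "us-central1")
#     # path == "projects/my-project/locations/us-central1"
#     assert SnapshotsV1Beta3Client.parse_common_location_path(path) == {
#         "project": "my-project",
#         "location": "us-central1",
#     }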
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (SnapshotsV1Beta3Client, transports.SnapshotsV1Beta3GrpcTransport), - (SnapshotsV1Beta3AsyncClient, transports.SnapshotsV1Beta3GrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) diff --git a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py b/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py deleted file mode 100644 index 00d9c6d..0000000 --- a/owl-bot-staging/v1beta3/tests/unit/gapic/dataflow_v1beta3/test_templates_service.py +++ /dev/null @@ -1,2059 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dataflow_v1beta3.services.templates_service import TemplatesServiceAsyncClient -from google.cloud.dataflow_v1beta3.services.templates_service import TemplatesServiceClient -from google.cloud.dataflow_v1beta3.services.templates_service import transports -from google.cloud.dataflow_v1beta3.types import environment -from google.cloud.dataflow_v1beta3.types import jobs -from google.cloud.dataflow_v1beta3.types import templates -from google.oauth2 import service_account -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert TemplatesServiceClient._get_default_mtls_endpoint(None) is None - assert TemplatesServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert TemplatesServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert TemplatesServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert TemplatesServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert TemplatesServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (TemplatesServiceClient, "grpc"), - (TemplatesServiceAsyncClient, "grpc_asyncio"), - (TemplatesServiceClient, "rest"), -]) -def test_templates_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.TemplatesServiceGrpcTransport, "grpc"), - (transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (transports.TemplatesServiceRestTransport, "rest"), -]) -def test_templates_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (TemplatesServiceClient, "grpc"), - (TemplatesServiceAsyncClient, "grpc_asyncio"), - (TemplatesServiceClient, "rest"), -]) -def test_templates_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, 
client_class) - - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://dataflow.googleapis.com' - ) - - -def test_templates_service_client_get_transport_class(): - transport = TemplatesServiceClient.get_transport_class() - available_transports = [ - transports.TemplatesServiceGrpcTransport, - transports.TemplatesServiceRestTransport, - ] - assert transport in available_transports - - transport = TemplatesServiceClient.get_transport_class("grpc") - assert transport == transports.TemplatesServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc"), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), -]) -@mock.patch.object(TemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceClient)) -@mock.patch.object(TemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceAsyncClient)) -def test_templates_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(TemplatesServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(TemplatesServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
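# Transport selection, as covered above, is driven either by a transport name
# or a ready-made transport instance. Caller-side sketch (not part of this
# change):
#
#     client = TemplatesServiceClient(
#         credentials=ga_credentials.AnonymousCredentials(),
#         transport="rest",   # or "grpc"; the async client uses "grpc_asyncio"
#     )
#     client = TemplatesServiceClient(
#         credentials=ga_credentials.AnonymousCredentials(),
#         client_options=client_options.ClientOptions(api_endpoint="squid.clam.whelk"),
#     )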
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", "true"), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", "false"), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), - (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest", "true"), - (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest", "false"), -]) -@mock.patch.object(TemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceClient)) -@mock.patch.object(TemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_templates_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
- - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - TemplatesServiceClient, TemplatesServiceAsyncClient -]) -@mock.patch.object(TemplatesServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceClient)) -@mock.patch.object(TemplatesServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TemplatesServiceAsyncClient)) -def test_templates_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc"), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio"), - (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest"), -]) -def test_templates_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
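# The environment-driven endpoint resolution verified above can be probed
# directly (a sketch, not part of this change):
#
#     import os
#     os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
#     endpoint, source = TemplatesServiceClient.get_mtls_endpoint_and_cert_source()
#     # endpoint == TemplatesServiceClient.DEFAULT_ENDPOINT and source is None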
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", grpc_helpers), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (TemplatesServiceClient, transports.TemplatesServiceRestTransport, "rest", None), -]) -def test_templates_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_templates_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = TemplatesServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport, "grpc", grpc_helpers), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_templates_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=None, - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - templates.CreateJobFromTemplateRequest, - dict, -]) -def test_create_job_from_template(request_type, transport: str = 'grpc'): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_from_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - ) - response = client.create_job_from_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == templates.CreateJobFromTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -def test_create_job_from_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_from_template), - '__call__') as call: - client.create_job_from_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == templates.CreateJobFromTemplateRequest() - -@pytest.mark.asyncio -async def test_create_job_from_template_async(transport: str = 'grpc_asyncio', request_type=templates.CreateJobFromTemplateRequest): - client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_from_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job( - id='id_value', - project_id='project_id_value', - name='name_value', - type_=environment.JobType.JOB_TYPE_BATCH, - steps_location='steps_location_value', - current_state=jobs.JobState.JOB_STATE_STOPPED, - requested_state=jobs.JobState.JOB_STATE_STOPPED, - replace_job_id='replace_job_id_value', - client_request_id='client_request_id_value', - replaced_by_job_id='replaced_by_job_id_value', - temp_files=['temp_files_value'], - location='location_value', - created_from_snapshot_id='created_from_snapshot_id_value', - satisfies_pzs=True, - )) - response = await client.create_job_from_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == templates.CreateJobFromTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, jobs.Job) - assert response.id == 'id_value' - assert response.project_id == 'project_id_value' - assert response.name == 'name_value' - assert response.type_ == environment.JobType.JOB_TYPE_BATCH - assert response.steps_location == 'steps_location_value' - assert response.current_state == jobs.JobState.JOB_STATE_STOPPED - assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED - assert response.replace_job_id == 'replace_job_id_value' - assert response.client_request_id == 'client_request_id_value' - assert response.replaced_by_job_id == 'replaced_by_job_id_value' - assert response.temp_files == ['temp_files_value'] - assert response.location == 'location_value' - assert response.created_from_snapshot_id == 'created_from_snapshot_id_value' - assert response.satisfies_pzs is True - - -@pytest.mark.asyncio -async def test_create_job_from_template_async_from_dict(): - await test_create_job_from_template_async(request_type=dict) - - -def test_create_job_from_template_field_headers(): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = templates.CreateJobFromTemplateRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_from_template), - '__call__') as call: - call.return_value = jobs.Job() - client.create_job_from_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_from_template_field_headers_async(): - client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = templates.CreateJobFromTemplateRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_from_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(jobs.Job()) - await client.create_job_from_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - templates.LaunchTemplateRequest, - dict, -]) -def test_launch_template(request_type, transport: str = 'grpc'): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
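# The field-header tests above verify implicit request routing: project_id and
# location from the request are mirrored into the x-goog-request-params
# metadata so the backend can route the call. Caller-side this needs no extra
# code (a sketch, not part of this change; the values are hypothetical):
#
#     request = templates.CreateJobFromTemplateRequest(
#         project_id="my-project",
#         location="us-central1",
#     )
#     response = client.create_job_from_template(request=request)
#     # sent header: x-goog-request-params=project_id=my-project&location=us-central1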
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.launch_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = templates.LaunchTemplateResponse(
-        )
-        response = client.launch_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == templates.LaunchTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, templates.LaunchTemplateResponse)
-
-
-def test_launch_template_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.launch_template),
-            '__call__') as call:
-        client.launch_template()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == templates.LaunchTemplateRequest()
-
-@pytest.mark.asyncio
-async def test_launch_template_async(transport: str = 'grpc_asyncio', request_type=templates.LaunchTemplateRequest):
-    client = TemplatesServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.launch_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchTemplateResponse(
-        ))
-        response = await client.launch_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == templates.LaunchTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, templates.LaunchTemplateResponse)
-
-
-@pytest.mark.asyncio
-async def test_launch_template_async_from_dict():
-    await test_launch_template_async(request_type=dict)
-
-
-def test_launch_template_field_headers():
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = templates.LaunchTemplateRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.launch_template),
-            '__call__') as call:
-        call.return_value = templates.LaunchTemplateResponse()
-        client.launch_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_launch_template_field_headers_async(): - client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = templates.LaunchTemplateRequest() - - request.project_id = 'project_id_value' - request.location = 'location_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.launch_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.LaunchTemplateResponse()) - await client.launch_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project_id=project_id_value&location=location_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - templates.GetTemplateRequest, - dict, -]) -def test_get_template(request_type, transport: str = 'grpc'): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = templates.GetTemplateResponse( - template_type=templates.GetTemplateResponse.TemplateType.LEGACY, - ) - response = client.get_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == templates.GetTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, templates.GetTemplateResponse) - assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY - - -def test_get_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_template), - '__call__') as call: - client.get_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == templates.GetTemplateRequest() - -@pytest.mark.asyncio -async def test_get_template_async(transport: str = 'grpc_asyncio', request_type=templates.GetTemplateRequest): - client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
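Note: the recurring "everything is optional in proto3" comment is what makes the empty-call and empty-request tests viable: every proto3 field has a zero value, so request_type() always constructs. A small illustration, using the GetTemplateRequest exercised above.

    from google.cloud.dataflow_v1beta3.types import templates

    request = templates.GetTemplateRequest()  # no arguments required
    assert request.project_id == ''           # unset strings default to ''
    request.project_id = 'project_id_value'   # fields can be set afterwards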
-    with mock.patch.object(
-            type(client.transport.get_template),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.GetTemplateResponse(
-            template_type=templates.GetTemplateResponse.TemplateType.LEGACY,
-        ))
-        response = await client.get_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == templates.GetTemplateRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, templates.GetTemplateResponse)
-    assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY
-
-
-@pytest.mark.asyncio
-async def test_get_template_async_from_dict():
-    await test_get_template_async(request_type=dict)
-
-
-def test_get_template_field_headers():
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = templates.GetTemplateRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_template),
-            '__call__') as call:
-        call.return_value = templates.GetTemplateResponse()
-        client.get_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&location=location_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_template_field_headers_async():
-    client = TemplatesServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = templates.GetTemplateRequest()
-
-    request.project_id = 'project_id_value'
-    request.location = 'location_value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_template),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(templates.GetTemplateResponse())
-        await client.get_template(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'project_id=project_id_value&location=location_value',
-    ) in kw['metadata']
-
-
-@pytest.mark.parametrize("request_type", [
-    templates.CreateJobFromTemplateRequest,
-    dict,
-])
-def test_create_job_from_template_rest(request_type):
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = jobs.Job(
-            id='id_value',
-            project_id='project_id_value',
-            name='name_value',
-            type_=environment.JobType.JOB_TYPE_BATCH,
-            steps_location='steps_location_value',
-            current_state=jobs.JobState.JOB_STATE_STOPPED,
-            requested_state=jobs.JobState.JOB_STATE_STOPPED,
-            replace_job_id='replace_job_id_value',
-            client_request_id='client_request_id_value',
-            replaced_by_job_id='replaced_by_job_id_value',
-            temp_files=['temp_files_value'],
-            location='location_value',
-            created_from_snapshot_id='created_from_snapshot_id_value',
-            satisfies_pzs=True,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = jobs.Job.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.create_job_from_template(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, jobs.Job)
-    assert response.id == 'id_value'
-    assert response.project_id == 'project_id_value'
-    assert response.name == 'name_value'
-    assert response.type_ == environment.JobType.JOB_TYPE_BATCH
-    assert response.steps_location == 'steps_location_value'
-    assert response.current_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.requested_state == jobs.JobState.JOB_STATE_STOPPED
-    assert response.replace_job_id == 'replace_job_id_value'
-    assert response.client_request_id == 'client_request_id_value'
-    assert response.replaced_by_job_id == 'replaced_by_job_id_value'
-    assert response.temp_files == ['temp_files_value']
-    assert response.location == 'location_value'
-    assert response.created_from_snapshot_id == 'created_from_snapshot_id_value'
-    assert response.satisfies_pzs is True
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_create_job_from_template_rest_interceptors(null_interceptor):
-    transport = transports.TemplatesServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.TemplatesServiceRestInterceptor(),
-    )
-    client = TemplatesServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.TemplatesServiceRestInterceptor, "post_create_job_from_template") as post, \
-        mock.patch.object(transports.TemplatesServiceRestInterceptor, "pre_create_job_from_template") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = templates.CreateJobFromTemplateRequest.pb(templates.CreateJobFromTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = jobs.Job.to_json(jobs.Job())
-
-        request = templates.CreateJobFromTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = jobs.Job()
-
-        client.create_job_from_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_create_job_from_template_rest_bad_request(transport: str = 'rest', request_type=templates.CreateJobFromTemplateRequest):
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.create_job_from_template(request)
-
-
-def test_create_job_from_template_rest_error():
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    templates.LaunchTemplateRequest,
-    dict,
-])
-def test_launch_template_rest(request_type):
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2'}
-    request_init["launch_parameters"] = {'job_name': 'job_name_value', 'parameters': {}, 'environment': {'num_workers': 1212, 'max_workers': 1202, 'zone': 'zone_value', 'service_account_email': 'service_account_email_value', 'temp_location': 'temp_location_value', 'bypass_temp_dir_validation': True, 'machine_type': 'machine_type_value', 'additional_experiments': ['additional_experiments_value1', 'additional_experiments_value2'], 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'additional_user_labels': {}, 'kms_key_name': 'kms_key_name_value', 'ip_configuration': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'enable_streaming_engine': True}, 'update': True, 'transform_name_mapping': {}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = templates.LaunchTemplateResponse(
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = templates.LaunchTemplateResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.launch_template(request)
-
-    # Establish that the response is the type that we expect.
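Note: the interceptor test above drives the pre_/post_ hook pair through mocks; in application code the same hooks are meant to be overridden on a subclass handed to the REST transport. A hedged sketch of that usage, reusing the interceptor and transport names generated in this patch, with the hook signatures inferred from the mocks above.

    from google.auth import credentials as ga_credentials
    from google.cloud.dataflow_v1beta3.services.templates_service import (
        TemplatesServiceClient,
        transports,
    )

    class AuditInterceptor(transports.TemplatesServiceRestInterceptor):
        def pre_create_job_from_template(self, request, metadata):
            # Runs before transcoding; may inspect or amend request/metadata.
            return request, metadata

        def post_create_job_from_template(self, response):
            # Runs after the HTTP response is deserialized.
            return response

    transport = transports.TemplatesServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=AuditInterceptor(),
    )
    client = TemplatesServiceClient(transport=transport)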
-    assert isinstance(response, templates.LaunchTemplateResponse)
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_launch_template_rest_interceptors(null_interceptor):
-    transport = transports.TemplatesServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.TemplatesServiceRestInterceptor(),
-    )
-    client = TemplatesServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.TemplatesServiceRestInterceptor, "post_launch_template") as post, \
-        mock.patch.object(transports.TemplatesServiceRestInterceptor, "pre_launch_template") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = templates.LaunchTemplateRequest.pb(templates.LaunchTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = templates.LaunchTemplateResponse.to_json(templates.LaunchTemplateResponse())
-
-        request = templates.LaunchTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = templates.LaunchTemplateResponse()
-
-        client.launch_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_launch_template_rest_bad_request(transport: str = 'rest', request_type=templates.LaunchTemplateRequest):
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2'}
-    request_init["launch_parameters"] = {'job_name': 'job_name_value', 'parameters': {}, 'environment': {'num_workers': 1212, 'max_workers': 1202, 'zone': 'zone_value', 'service_account_email': 'service_account_email_value', 'temp_location': 'temp_location_value', 'bypass_temp_dir_validation': True, 'machine_type': 'machine_type_value', 'additional_experiments': ['additional_experiments_value1', 'additional_experiments_value2'], 'network': 'network_value', 'subnetwork': 'subnetwork_value', 'additional_user_labels': {}, 'kms_key_name': 'kms_key_name_value', 'ip_configuration': 1, 'worker_region': 'worker_region_value', 'worker_zone': 'worker_zone_value', 'enable_streaming_engine': True}, 'update': True, 'transform_name_mapping': {}}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a BadRequest error.
-    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 400
-        response_value.request = Request()
-        req.return_value = response_value
-        client.launch_template(request)
-
-
-def test_launch_template_rest_error():
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='rest'
-    )
-
-
-@pytest.mark.parametrize("request_type", [
-    templates.GetTemplateRequest,
-    dict,
-])
-def test_get_template_rest(request_type):
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport="rest",
-    )
-
-    # send a request that will satisfy transcoding
-    request_init = {'project_id': 'sample1', 'location': 'sample2'}
-    request = request_type(**request_init)
-
-    # Mock the http request call within the method and fake a response.
-    with mock.patch.object(type(client.transport._session), 'request') as req:
-        # Designate an appropriate value for the returned response.
-        return_value = templates.GetTemplateResponse(
-            template_type=templates.GetTemplateResponse.TemplateType.LEGACY,
-        )
-
-        # Wrap the value into a proper Response obj
-        response_value = Response()
-        response_value.status_code = 200
-        pb_return_value = templates.GetTemplateResponse.pb(return_value)
-        json_return_value = json_format.MessageToJson(pb_return_value)
-
-        response_value._content = json_return_value.encode('UTF-8')
-        req.return_value = response_value
-        response = client.get_template(request)
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, templates.GetTemplateResponse)
-    assert response.template_type == templates.GetTemplateResponse.TemplateType.LEGACY
-
-
-@pytest.mark.parametrize("null_interceptor", [True, False])
-def test_get_template_rest_interceptors(null_interceptor):
-    transport = transports.TemplatesServiceRestTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-        interceptor=None if null_interceptor else transports.TemplatesServiceRestInterceptor(),
-    )
-    client = TemplatesServiceClient(transport=transport)
-    with mock.patch.object(type(client.transport._session), "request") as req, \
-        mock.patch.object(path_template, "transcode") as transcode, \
-        mock.patch.object(transports.TemplatesServiceRestInterceptor, "post_get_template") as post, \
-        mock.patch.object(transports.TemplatesServiceRestInterceptor, "pre_get_template") as pre:
-        pre.assert_not_called()
-        post.assert_not_called()
-        pb_message = templates.GetTemplateRequest.pb(templates.GetTemplateRequest())
-        transcode.return_value = {
-            "method": "post",
-            "uri": "my_uri",
-            "body": pb_message,
-            "query_params": pb_message,
-        }
-
-        req.return_value = Response()
-        req.return_value.status_code = 200
-        req.return_value.request = PreparedRequest()
-        req.return_value._content = templates.GetTemplateResponse.to_json(templates.GetTemplateResponse())
-
-        request = templates.GetTemplateRequest()
-        metadata = [
-            ("key", "val"),
-            ("cephalopod", "squid"),
-        ]
-        pre.return_value = request, metadata
-        post.return_value = templates.GetTemplateResponse()
-
-        client.get_template(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
-
-        pre.assert_called_once()
-        post.assert_called_once()
-
-
-def test_get_template_rest_bad_request(transport: str = 'rest', request_type=templates.GetTemplateRequest):
-    client = TemplatesServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'project_id': 'sample1', 'location': 'sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_template(request) - - -def test_get_template_rest_error(): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.TemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.TemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TemplatesServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.TemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TemplatesServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TemplatesServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.TemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TemplatesServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.TemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TemplatesServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.TemplatesServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.TemplatesServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.TemplatesServiceGrpcTransport, - transports.TemplatesServiceGrpcAsyncIOTransport, - transports.TemplatesServiceRestTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
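Note: a compact restatement of the contract test_credentials_transport_error and test_transport_instance pin down above; a pre-built transport already carries its own credentials, so supplying credentials alongside one is rejected. Sketch only, using anonymous credentials.

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud.dataflow_v1beta3.services.templates_service import (
        TemplatesServiceClient,
        transports,
    )

    transport = transports.TemplatesServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = TemplatesServiceClient(transport=transport)
    assert client.transport is transport

    # Credentials plus a ready transport is ambiguous, hence the ValueError.
    with pytest.raises(ValueError):
        TemplatesServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport,
        )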
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = TemplatesServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.TemplatesServiceGrpcTransport, - ) - -def test_templates_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TemplatesServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_templates_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.TemplatesServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_job_from_template', - 'launch_template', - 'get_template', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_templates_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TemplatesServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id="octopus", - ) - - -def test_templates_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
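Note: the base-transport test above relies on the GAPIC convention that the abstract transport defines every RPC as a property raising NotImplementedError until a concrete transport overrides it. An illustrative check of that contract, assuming the base class is directly instantiable as the tests above suggest.

    import pytest
    from google.auth import credentials as ga_credentials
    from google.cloud.dataflow_v1beta3.services.templates_service import transports

    transport = transports.TemplatesServiceTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Concrete gRPC/REST transports override this; the base class only raises.
    with pytest.raises(NotImplementedError):
        transport.create_job_from_template(request=object())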
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dataflow_v1beta3.services.templates_service.transports.TemplatesServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TemplatesServiceTransport() - adc.assert_called_once() - - -def test_templates_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TemplatesServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TemplatesServiceGrpcTransport, - transports.TemplatesServiceGrpcAsyncIOTransport, - ], -) -def test_templates_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/compute', 'https://www.googleapis.com/auth/compute.readonly', 'https://www.googleapis.com/auth/userinfo.email',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TemplatesServiceGrpcTransport, - transports.TemplatesServiceGrpcAsyncIOTransport, - transports.TemplatesServiceRestTransport, - ], -) -def test_templates_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.TemplatesServiceGrpcTransport, grpc_helpers), - (transports.TemplatesServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_templates_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
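Note: the scope assertions above follow google.auth's two-tier model: caller-supplied scopes take precedence, and the generated default_scopes act as the service's fallback set. A minimal sketch of the same call in live code; it requires Application Default Credentials to be configured to actually run.

    import google.auth

    credentials, project_id = google.auth.default(
        scopes=['https://www.googleapis.com/auth/cloud-platform'],      # wins if set
        default_scopes=['https://www.googleapis.com/auth/userinfo.email'],  # fallback
    )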
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "dataflow.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/compute', - 'https://www.googleapis.com/auth/compute.readonly', - 'https://www.googleapis.com/auth/userinfo.email', -), - scopes=["1", "2"], - default_host="dataflow.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport]) -def test_templates_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - -def test_templates_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.TemplatesServiceRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_templates_service_host_no_port(transport_name): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataflow.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataflow.googleapis.com' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_templates_service_host_with_port(transport_name): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='dataflow.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'dataflow.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://dataflow.googleapis.com:8000' - ) - -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_templates_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = TemplatesServiceClient( - credentials=creds1, - transport=transport_name, - ) - client2 = TemplatesServiceClient( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.create_job_from_template._session - session2 = client2.transport.create_job_from_template._session - assert session1 != session2 - session1 = client1.transport.launch_template._session - session2 = client2.transport.launch_template._session - assert session1 != session2 - session1 = client1.transport.get_template._session - session2 = client2.transport.get_template._session - assert session1 != session2 -def test_templates_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.TemplatesServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_templates_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.TemplatesServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport]) -def test_templates_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
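Note: judging from the assertion on b"cert bytes"/b"key bytes" above, client_cert_source_callback (defined earlier in this module) is a zero-argument callable returning a (certificate_chain, private_key) byte pair. A callback handed to client_cert_source_for_mtls in real code has the same shape; the file paths below are hypothetical.

    def my_client_cert_source():
        # Return (certificate_chain, private_key) as bytes for mTLS.
        with open('client_cert.pem', 'rb') as cert_file, \
             open('client_key.pem', 'rb') as key_file:
            return cert_file.read(), key_file.read()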
-@pytest.mark.parametrize("transport_class", [transports.TemplatesServiceGrpcTransport, transports.TemplatesServiceGrpcAsyncIOTransport]) -def test_templates_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = TemplatesServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = TemplatesServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = TemplatesServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = TemplatesServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = TemplatesServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = TemplatesServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = TemplatesServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = TemplatesServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = TemplatesServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = TemplatesServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = TemplatesServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = TemplatesServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = TemplatesServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = TemplatesServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = TemplatesServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.TemplatesServiceTransport, '_prep_wrapped_messages') as prep: - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.TemplatesServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = TemplatesServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = TemplatesServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'rest', - 'grpc', - ] - for transport in transports: - client = TemplatesServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (TemplatesServiceClient, transports.TemplatesServiceGrpcTransport), - (TemplatesServiceAsyncClient, transports.TemplatesServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )